Oct 07 07:56:13 crc systemd[1]: Starting Kubernetes Kubelet... Oct 07 07:56:13 crc restorecon[4682]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 07:56:13 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 
crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 
07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 
07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc 
restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 07:56:14 crc restorecon[4682]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 07 07:56:15 crc kubenswrapper[4875]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.484550 4875 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492121 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492152 4875 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492158 4875 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492162 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492166 4875 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492170 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492175 4875 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492179 4875 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492183 4875 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492187 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492191 4875 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492194 4875 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492198 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492201 4875 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492205 4875 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492208 4875 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492212 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492216 4875 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492220 4875 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492234 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 
07:56:15.492238 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492243 4875 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492247 4875 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492251 4875 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492255 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492259 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492263 4875 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492267 4875 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492271 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492275 4875 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492280 4875 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492285 4875 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492289 4875 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492293 4875 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492296 4875 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492301 4875 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492305 4875 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492308 4875 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492312 4875 feature_gate.go:330] unrecognized feature gate: Example Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492315 4875 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492319 4875 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492323 4875 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492326 4875 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492330 4875 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492333 4875 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492337 4875 feature_gate.go:330] unrecognized feature gate: 
AWSClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492341 4875 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492345 4875 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492349 4875 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492354 4875 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492358 4875 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492362 4875 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492367 4875 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492372 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492376 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492380 4875 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492383 4875 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492387 4875 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492391 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492394 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492397 4875 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492401 4875 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492405 4875 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492410 4875 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492414 4875 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492418 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492423 4875 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492427 4875 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492432 4875 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492436 4875 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.492441 4875 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492547 4875 flags.go:64] FLAG: --address="0.0.0.0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492559 4875 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492567 4875 flags.go:64] FLAG: --anonymous-auth="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492573 4875 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492579 4875 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492583 4875 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492590 4875 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492598 4875 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492603 4875 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492607 4875 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492611 4875 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492616 4875 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492621 4875 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492625 4875 flags.go:64] FLAG: --cgroup-root="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492630 4875 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492634 4875 flags.go:64] FLAG: --client-ca-file="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492638 4875 flags.go:64] FLAG: --cloud-config="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492642 4875 flags.go:64] FLAG: --cloud-provider="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492647 4875 flags.go:64] FLAG: --cluster-dns="[]" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492653 4875 flags.go:64] FLAG: --cluster-domain="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492657 4875 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492661 4875 flags.go:64] FLAG: --config-dir="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492665 4875 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492669 4875 flags.go:64] FLAG: --container-log-max-files="5" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492675 4875 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492679 4875 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492684 4875 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492688 4875 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492692 4875 flags.go:64] FLAG: --contention-profiling="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 
07:56:15.492696 4875 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492702 4875 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492707 4875 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492711 4875 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492717 4875 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492722 4875 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492728 4875 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492733 4875 flags.go:64] FLAG: --enable-load-reader="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492739 4875 flags.go:64] FLAG: --enable-server="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492745 4875 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492752 4875 flags.go:64] FLAG: --event-burst="100" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492757 4875 flags.go:64] FLAG: --event-qps="50" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492762 4875 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492768 4875 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492773 4875 flags.go:64] FLAG: --eviction-hard="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492779 4875 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492784 4875 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492794 4875 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492800 4875 flags.go:64] FLAG: --eviction-soft="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492806 4875 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492811 4875 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492816 4875 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492821 4875 flags.go:64] FLAG: --experimental-mounter-path="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492826 4875 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492831 4875 flags.go:64] FLAG: --fail-swap-on="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492840 4875 flags.go:64] FLAG: --feature-gates="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492847 4875 flags.go:64] FLAG: --file-check-frequency="20s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492852 4875 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492858 4875 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492863 4875 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 
07:56:15.492868 4875 flags.go:64] FLAG: --healthz-port="10248" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492872 4875 flags.go:64] FLAG: --help="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492893 4875 flags.go:64] FLAG: --hostname-override="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492898 4875 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492903 4875 flags.go:64] FLAG: --http-check-frequency="20s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492908 4875 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492913 4875 flags.go:64] FLAG: --image-credential-provider-config="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492919 4875 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492924 4875 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492930 4875 flags.go:64] FLAG: --image-service-endpoint="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492935 4875 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492940 4875 flags.go:64] FLAG: --kube-api-burst="100" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492945 4875 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492951 4875 flags.go:64] FLAG: --kube-api-qps="50" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492956 4875 flags.go:64] FLAG: --kube-reserved="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492961 4875 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492966 4875 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492971 4875 flags.go:64] FLAG: --kubelet-cgroups="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492976 4875 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492983 4875 flags.go:64] FLAG: --lock-file="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492989 4875 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.492994 4875 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493000 4875 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493008 4875 flags.go:64] FLAG: --log-json-split-stream="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493013 4875 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493018 4875 flags.go:64] FLAG: --log-text-split-stream="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493023 4875 flags.go:64] FLAG: --logging-format="text" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493028 4875 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493034 4875 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493039 4875 flags.go:64] FLAG: --manifest-url="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493045 4875 
flags.go:64] FLAG: --manifest-url-header="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493051 4875 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493056 4875 flags.go:64] FLAG: --max-open-files="1000000" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493063 4875 flags.go:64] FLAG: --max-pods="110" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493068 4875 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493073 4875 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493077 4875 flags.go:64] FLAG: --memory-manager-policy="None" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493082 4875 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493086 4875 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493090 4875 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493094 4875 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493105 4875 flags.go:64] FLAG: --node-status-max-images="50" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493109 4875 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493114 4875 flags.go:64] FLAG: --oom-score-adj="-999" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493119 4875 flags.go:64] FLAG: --pod-cidr="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493123 4875 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493131 4875 flags.go:64] FLAG: --pod-manifest-path="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493135 4875 flags.go:64] FLAG: --pod-max-pids="-1" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493139 4875 flags.go:64] FLAG: --pods-per-core="0" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493143 4875 flags.go:64] FLAG: --port="10250" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493148 4875 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493153 4875 flags.go:64] FLAG: --provider-id="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493157 4875 flags.go:64] FLAG: --qos-reserved="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493162 4875 flags.go:64] FLAG: --read-only-port="10255" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493166 4875 flags.go:64] FLAG: --register-node="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493170 4875 flags.go:64] FLAG: --register-schedulable="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493175 4875 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493182 4875 flags.go:64] FLAG: --registry-burst="10" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493186 4875 flags.go:64] FLAG: --registry-qps="5" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493190 4875 flags.go:64] 
FLAG: --reserved-cpus="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493194 4875 flags.go:64] FLAG: --reserved-memory="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493199 4875 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493204 4875 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493208 4875 flags.go:64] FLAG: --rotate-certificates="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493212 4875 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493216 4875 flags.go:64] FLAG: --runonce="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493220 4875 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493225 4875 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493230 4875 flags.go:64] FLAG: --seccomp-default="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493235 4875 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493240 4875 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493246 4875 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493251 4875 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493257 4875 flags.go:64] FLAG: --storage-driver-password="root" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493262 4875 flags.go:64] FLAG: --storage-driver-secure="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493266 4875 flags.go:64] FLAG: --storage-driver-table="stats" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493270 4875 flags.go:64] FLAG: --storage-driver-user="root" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493275 4875 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493279 4875 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493285 4875 flags.go:64] FLAG: --system-cgroups="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493289 4875 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493296 4875 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493300 4875 flags.go:64] FLAG: --tls-cert-file="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493305 4875 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493311 4875 flags.go:64] FLAG: --tls-min-version="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493315 4875 flags.go:64] FLAG: --tls-private-key-file="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493319 4875 flags.go:64] FLAG: --topology-manager-policy="none" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493324 4875 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493328 4875 flags.go:64] FLAG: --topology-manager-scope="container" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493332 4875 flags.go:64] 
FLAG: --v="2" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493338 4875 flags.go:64] FLAG: --version="false" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493344 4875 flags.go:64] FLAG: --vmodule="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493349 4875 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493354 4875 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493455 4875 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493460 4875 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493464 4875 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493469 4875 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493473 4875 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493476 4875 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493480 4875 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493484 4875 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493487 4875 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493491 4875 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493494 4875 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493498 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493501 4875 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493506 4875 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493510 4875 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493514 4875 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493518 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493521 4875 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493525 4875 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493529 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493533 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493538 4875 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493543 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493547 4875 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493551 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493555 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493558 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493562 4875 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493566 4875 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493569 4875 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493573 4875 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493576 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493579 4875 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493583 4875 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493587 4875 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493592 4875 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493596 4875 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493600 4875 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493604 4875 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493608 4875 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493613 4875 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493618 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493622 4875 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493625 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493629 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493633 4875 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493637 4875 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493640 4875 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493644 4875 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493648 4875 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493652 4875 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493662 4875 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493665 4875 feature_gate.go:330] unrecognized feature gate: Example Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493669 4875 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493672 4875 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493676 4875 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493679 4875 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493683 4875 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493686 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493690 4875 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493693 4875 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493697 4875 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493700 4875 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493705 4875 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstall Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493709 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493713 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493717 4875 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493722 4875 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493726 4875 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493731 4875 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.493735 4875 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.493752 4875 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.506570 4875 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.506620 4875 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506699 4875 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506707 4875 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506712 4875 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506718 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506723 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506727 4875 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506733 4875 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506739 4875 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506743 4875 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506747 4875 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506751 4875 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506779 4875 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506786 4875 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506791 4875 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506795 4875 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506799 4875 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506804 4875 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506808 4875 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506812 4875 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506817 4875 feature_gate.go:330] unrecognized feature gate: Example Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506821 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506825 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506829 4875 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506833 4875 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506837 4875 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506841 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506845 4875 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506849 4875 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506853 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506857 4875 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506861 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506869 4875 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506892 4875 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 07:56:15 crc 
kubenswrapper[4875]: W1007 07:56:15.506897 4875 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506901 4875 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506905 4875 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506909 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506914 4875 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506918 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506922 4875 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506926 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506930 4875 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506934 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506938 4875 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506942 4875 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506946 4875 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506950 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506954 4875 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506959 4875 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506963 4875 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506967 4875 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506970 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506975 4875 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506979 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506984 4875 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506989 4875 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.506995 4875 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507001 4875 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507005 4875 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507010 4875 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507015 4875 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507020 4875 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507024 4875 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507031 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507037 4875 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507041 4875 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507046 4875 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507052 4875 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507056 4875 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507060 4875 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507065 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.507072 4875 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507189 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507196 4875 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507201 4875 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507205 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507209 4875 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507213 4875 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507217 4875 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507221 
4875 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507225 4875 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507229 4875 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507233 4875 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507237 4875 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507241 4875 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507245 4875 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507249 4875 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507253 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507258 4875 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507264 4875 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507269 4875 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507273 4875 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507277 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507281 4875 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507285 4875 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507290 4875 feature_gate.go:330] unrecognized feature gate: Example Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507295 4875 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507299 4875 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507303 4875 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507307 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507311 4875 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507315 4875 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507319 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507322 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507326 4875 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 07:56:15 
crc kubenswrapper[4875]: W1007 07:56:15.507332 4875 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507336 4875 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507340 4875 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507344 4875 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507348 4875 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507352 4875 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507356 4875 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507360 4875 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507364 4875 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507368 4875 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507372 4875 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507376 4875 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507380 4875 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507385 4875 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507390 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507395 4875 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507399 4875 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507404 4875 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507408 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507412 4875 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507416 4875 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507420 4875 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507425 4875 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507430 4875 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507435 4875 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. 
It will be removed in a future release. Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507440 4875 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507445 4875 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507449 4875 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507453 4875 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507457 4875 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507461 4875 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507465 4875 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507469 4875 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507473 4875 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507477 4875 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507481 4875 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507485 4875 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.507489 4875 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.507496 4875 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.507680 4875 server.go:940] "Client rotation is on, will bootstrap in background" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.513596 4875 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.513701 4875 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.515485 4875 server.go:997] "Starting client certificate rotation" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.515501 4875 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.516351 4875 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-19 17:46:06.409778083 +0000 UTC Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.516456 4875 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1761h49m50.893325147s for next certificate rotation Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.541429 4875 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.546921 4875 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.565301 4875 log.go:25] "Validated CRI v1 runtime API" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.597561 4875 log.go:25] "Validated CRI v1 image API" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.599111 4875 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.605528 4875 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-07-07-51-40-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.605552 4875 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.619277 4875 manager.go:217] Machine: {Timestamp:2025-10-07 07:56:15.617430405 +0000 UTC m=+0.577200968 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:288fc606-a984-4194-ac49-303e4a239cb9 BootID:e390eb55-bfef-4c82-ba02-53426a3fd939 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 
DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:93:4f:f8 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:93:4f:f8 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:ba:61:54 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f6:c0:6a Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:9d:ea:4e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:3a:8a:d0 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:06:ed:1a:0f:3b:80 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9a:e4:35:e3:ad:eb Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] 
SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.619482 4875 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.619655 4875 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.620734 4875 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.621024 4875 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.621059 4875 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.623068 4875 topology_manager.go:138] "Creating topology manager with none policy" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.623096 4875 container_manager_linux.go:303] "Creating device plugin manager" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.624006 4875 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.624036 4875 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.625164 4875 state_mem.go:36] "Initialized new in-memory state store" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.625275 4875 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.629408 4875 kubelet.go:418] "Attempting to sync node with API server" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.629463 4875 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.629509 4875 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.629530 4875 kubelet.go:324] "Adding apiserver pod source" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.629546 4875 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.634774 4875 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.637344 4875 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.639565 4875 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.640799 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.640808 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.640915 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.640925 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.641618 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.641800 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.641978 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642118 4875 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642231 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642364 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642494 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642635 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642826 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642917 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.642992 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.643053 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.644430 4875 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.645093 4875 server.go:1280] "Started kubelet" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.645391 4875 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.645323 4875 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.645653 4875 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.645864 4875 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.646833 4875 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.646888 4875 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.646943 4875 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 01:26:15.973837246 +0000 UTC Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.646976 4875 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1265h30m0.326862708s for next certificate rotation Oct 07 07:56:15 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.647156 4875 server.go:460] "Adding debug handlers to kubelet server" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.647194 4875 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.647227 4875 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.647243 4875 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.647346 4875 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.647694 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.647752 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.655587 4875 factory.go:55] Registering systemd factory Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.655613 4875 factory.go:221] Registration of the systemd container factory successfully Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.655946 4875 factory.go:153] Registering CRI-O factory Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.655987 4875 factory.go:221] Registration of the crio container factory successfully Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.656095 4875 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.656135 4875 factory.go:103] Registering Raw factory Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.656155 4875 manager.go:1196] Started watching for new ooms in manager Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.655945 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="200ms" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.657692 4875 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c2666669c4c79 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 07:56:15.645060217 +0000 UTC m=+0.604830770,LastTimestamp:2025-10-07 07:56:15.645060217 +0000 UTC 
m=+0.604830770,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.663658 4875 manager.go:319] Starting recovery of all containers Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.664677 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.664724 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.664738 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667639 4875 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667676 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667691 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667703 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667718 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667732 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667749 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667762 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667775 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667789 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667801 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667817 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667829 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667841 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667852 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667864 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667898 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667914 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667928 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667941 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667956 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667970 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.667986 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668000 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668015 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668053 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668087 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668106 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668120 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668139 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668152 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668166 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668180 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668192 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668204 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668219 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668230 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668243 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668257 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668270 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668309 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668322 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668335 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668347 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668358 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668369 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668381 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668393 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668405 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668416 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668447 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668463 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668476 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668488 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668503 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668516 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668529 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668541 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668553 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668565 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668578 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668591 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668604 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668616 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668629 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668678 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668694 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668706 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668720 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668733 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668749 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668762 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668776 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668787 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668800 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668813 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668825 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668841 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668855 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668868 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668898 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668911 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668925 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668938 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668949 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668963 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668979 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.668993 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669005 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669019 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669031 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669044 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669057 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669073 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669112 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669127 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669140 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669152 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669165 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669178 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669192 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669207 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669226 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669241 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669255 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669270 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669284 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669298 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669312 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669325 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669338 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669353 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669366 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669395 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669410 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669422 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669434 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669446 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669460 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669471 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669487 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669499 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669510 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669522 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669533 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669583 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669597 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669611 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669624 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669638 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669651 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669664 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669677 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669692 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669705 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669718 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669730 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669742 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669755 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" 
volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669769 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669782 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669796 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669810 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669823 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669837 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669850 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669862 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669890 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669905 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669918 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669933 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669947 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.669960 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671647 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671673 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671685 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671696 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671711 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671723 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671734 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671745 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671757 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671771 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671782 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671794 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671809 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671823 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671837 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671850 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671863 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671889 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671903 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671916 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671928 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671943 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671956 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.671970 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672058 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672077 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672088 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672101 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672115 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672129 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672141 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672153 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672165 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672177 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672188 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672199 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672208 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672219 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672229 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672241 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672254 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672266 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672282 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672295 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672307 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672317 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672326 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672335 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672344 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672353 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672363 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672376 4875 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672386 4875 reconstruct.go:97] "Volume reconstruction finished" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.672394 4875 reconciler.go:26] "Reconciler: start to sync state" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.683975 4875 manager.go:324] Recovery completed Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.692272 4875 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.692808 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696034 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696082 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696189 4875 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696217 4875 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696239 4875 kubelet.go:2335] "Starting kubelet main sync loop" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.696399 4875 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696863 4875 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696897 4875 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.696919 4875 state_mem.go:36] "Initialized new in-memory state store" Oct 07 07:56:15 crc kubenswrapper[4875]: W1007 07:56:15.698658 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.698833 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.714540 4875 policy_none.go:49] "None policy: Start" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.715938 4875 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.715986 4875 state_mem.go:35] "Initializing new in-memory state store" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.748640 4875 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 
07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.771215 4875 manager.go:334] "Starting Device Plugin manager" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.771413 4875 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.771429 4875 server.go:79] "Starting device plugin registration server" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.771914 4875 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.771930 4875 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.772674 4875 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.773906 4875 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.773919 4875 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.782762 4875 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.796984 4875 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.797132 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.798200 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.798236 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.798245 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.798383 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799003 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799083 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799649 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.799794 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.800318 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.800363 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801505 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801599 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801622 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801796 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.801809 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.802088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.802162 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.802707 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.802871 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.802934 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803578 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803649 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803671 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803937 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804063 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804156 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804178 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804187 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.803670 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804250 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804384 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.804419 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805218 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805338 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805457 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.805429 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.857155 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="400ms" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.872937 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873616 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873649 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873666 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873681 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873730 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") 
" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873746 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873764 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873781 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873842 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873944 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873976 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.873998 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874017 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874036 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874056 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874708 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.874788 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:15 crc kubenswrapper[4875]: E1007 07:56:15.875253 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.975763 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.975923 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.975955 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.975957 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976006 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976066 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976086 4875 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976092 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976141 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976102 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976109 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976167 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976190 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976052 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976184 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976170 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976337 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976412 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976435 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976458 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976496 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976500 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976460 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976505 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976535 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976543 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" 
(UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976565 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976454 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976602 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:15 crc kubenswrapper[4875]: I1007 07:56:15.976674 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.076120 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.078041 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.078079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.078090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.078116 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.078435 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.131994 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.140439 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.161648 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.182512 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.183817 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-9aeebc8d3a3dffbec1cdb525946b40d7d8cf870e1f3402df42c092aec9d8f0dc WatchSource:0}: Error finding container 9aeebc8d3a3dffbec1cdb525946b40d7d8cf870e1f3402df42c092aec9d8f0dc: Status 404 returned error can't find the container with id 9aeebc8d3a3dffbec1cdb525946b40d7d8cf870e1f3402df42c092aec9d8f0dc Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.184279 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-68a0f2a9aca8a47769c01557ca055ef64da57576c774d1167b726d98fa991528 WatchSource:0}: Error finding container 68a0f2a9aca8a47769c01557ca055ef64da57576c774d1167b726d98fa991528: Status 404 returned error can't find the container with id 68a0f2a9aca8a47769c01557ca055ef64da57576c774d1167b726d98fa991528 Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.188039 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-6e5ada2d8d56311032f65a020f744ed5d9220d68a34a7e91224711883171c2bc WatchSource:0}: Error finding container 6e5ada2d8d56311032f65a020f744ed5d9220d68a34a7e91224711883171c2bc: Status 404 returned error can't find the container with id 6e5ada2d8d56311032f65a020f744ed5d9220d68a34a7e91224711883171c2bc Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.188399 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.198027 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-0a7f9707eba5001d4bc012870715eff4ef3167e568d527fcd16b6d4878ea609f WatchSource:0}: Error finding container 0a7f9707eba5001d4bc012870715eff4ef3167e568d527fcd16b6d4878ea609f: Status 404 returned error can't find the container with id 0a7f9707eba5001d4bc012870715eff4ef3167e568d527fcd16b6d4878ea609f Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.202663 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-a626ecba7400714620e6c6741c5b916d6c83721e90e60f268c1b5ba93c8243e8 WatchSource:0}: Error finding container a626ecba7400714620e6c6741c5b916d6c83721e90e60f268c1b5ba93c8243e8: Status 404 returned error can't find the container with id a626ecba7400714620e6c6741c5b916d6c83721e90e60f268c1b5ba93c8243e8 Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.258398 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="800ms" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.479382 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.480795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.480833 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.480842 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.480896 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.481349 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.509046 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.509131 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.646702 4875 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: 
connection refused Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.699691 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9aeebc8d3a3dffbec1cdb525946b40d7d8cf870e1f3402df42c092aec9d8f0dc"} Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.700371 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"68a0f2a9aca8a47769c01557ca055ef64da57576c774d1167b726d98fa991528"} Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.700942 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a626ecba7400714620e6c6741c5b916d6c83721e90e60f268c1b5ba93c8243e8"} Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.701901 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0a7f9707eba5001d4bc012870715eff4ef3167e568d527fcd16b6d4878ea609f"} Oct 07 07:56:16 crc kubenswrapper[4875]: I1007 07:56:16.702426 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6e5ada2d8d56311032f65a020f744ed5d9220d68a34a7e91224711883171c2bc"} Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.712204 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.712300 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:16 crc kubenswrapper[4875]: W1007 07:56:16.993977 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:16 crc kubenswrapper[4875]: E1007 07:56:16.994119 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:17 crc kubenswrapper[4875]: E1007 07:56:17.059178 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="1.6s" Oct 07 07:56:17 crc kubenswrapper[4875]: W1007 07:56:17.144460 4875 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:17 crc kubenswrapper[4875]: E1007 07:56:17.144559 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.282232 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.284087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.284135 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.284146 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.284168 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:17 crc kubenswrapper[4875]: E1007 07:56:17.284646 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 07 07:56:17 crc kubenswrapper[4875]: E1007 07:56:17.435978 4875 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c2666669c4c79 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 07:56:15.645060217 +0000 UTC m=+0.604830770,LastTimestamp:2025-10-07 07:56:15.645060217 +0000 UTC m=+0.604830770,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.646626 4875 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.705959 4875 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef" exitCode=0 Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.706031 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.706116 4875 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707068 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707101 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707792 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a" exitCode=0 Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707829 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.707911 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.708525 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.708540 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.708547 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.710044 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.710618 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.710635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.710651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712022 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712053 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712062 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762"} Oct 07 07:56:17 crc kubenswrapper[4875]: 
I1007 07:56:17.712071 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712143 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712737 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.712764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.714504 4875 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a" exitCode=0 Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.714542 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.714621 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.715134 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.715151 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.715159 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.716435 4875 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018" exitCode=0 Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.716455 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018"} Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.716497 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.724781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.724820 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:17 crc kubenswrapper[4875]: I1007 07:56:17.724838 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: W1007 07:56:18.511822 4875 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:18 crc kubenswrapper[4875]: E1007 07:56:18.512018 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.647229 4875 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:18 crc kubenswrapper[4875]: W1007 07:56:18.651569 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Oct 07 07:56:18 crc kubenswrapper[4875]: E1007 07:56:18.651653 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Oct 07 07:56:18 crc kubenswrapper[4875]: E1007 07:56:18.660374 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="3.2s" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.722868 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.722950 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.722962 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.722973 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.726694 4875 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54" exitCode=0 Oct 07 07:56:18 
crc kubenswrapper[4875]: I1007 07:56:18.726759 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.726921 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.727946 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.728192 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.728207 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.729631 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.729718 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.730852 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.730894 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.730906 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.732228 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.732529 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.732567 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.732581 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb"} Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.732627 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733053 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733104 4875 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733116 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733941 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733976 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.733989 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.884963 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.886294 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.886324 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.886332 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:18 crc kubenswrapper[4875]: I1007 07:56:18.886355 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:18 crc kubenswrapper[4875]: E1007 07:56:18.886707 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.738000 4875 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca" exitCode=0 Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.738110 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca"} Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.740921 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.742597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.742648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.742666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.744016 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2"} Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.744078 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.744119 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.744155 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.744199 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745213 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745277 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745302 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745328 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745347 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745357 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745517 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:19 crc kubenswrapper[4875]: I1007 07:56:19.745540 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.511043 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.511280 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.512357 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.512392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.512403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.519496 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750453 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5"} Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750509 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132"} Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750530 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8"} Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750549 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a"} Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750569 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750603 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.750663 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751619 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751664 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751677 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751639 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:20 crc kubenswrapper[4875]: I1007 07:56:20.751755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.188094 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.321605 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.759306 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.759404 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.759416 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.759301 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4"} Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.760647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.760707 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.760727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.760977 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.761057 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:21 crc kubenswrapper[4875]: I1007 07:56:21.761084 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.087188 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.088871 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.088945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.088954 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.089000 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.636653 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.761746 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.762598 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.762642 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.766995 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.767051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.767059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.767071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.767132 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:22 crc kubenswrapper[4875]: I1007 07:56:22.767174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:23 crc kubenswrapper[4875]: I1007 07:56:23.763264 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:23 
crc kubenswrapper[4875]: I1007 07:56:23.764731 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:23 crc kubenswrapper[4875]: I1007 07:56:23.764775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:23 crc kubenswrapper[4875]: I1007 07:56:23.764785 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:24 crc kubenswrapper[4875]: I1007 07:56:24.495918 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:56:24 crc kubenswrapper[4875]: I1007 07:56:24.496147 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:24 crc kubenswrapper[4875]: I1007 07:56:24.497848 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:24 crc kubenswrapper[4875]: I1007 07:56:24.498098 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:24 crc kubenswrapper[4875]: I1007 07:56:24.498236 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.204217 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.204408 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.205775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.205814 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.205830 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.477187 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.613793 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.614075 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.615445 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.615506 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.615526 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.623704 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.623931 4875 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.625070 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.625127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.625152 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.767823 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.769125 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.769196 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:25 crc kubenswrapper[4875]: I1007 07:56:25.769215 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:25 crc kubenswrapper[4875]: E1007 07:56:25.783817 4875 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.557139 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.772149 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.775245 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.775305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.775319 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:26 crc kubenswrapper[4875]: I1007 07:56:26.779999 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:27 crc kubenswrapper[4875]: I1007 07:56:27.774278 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:27 crc kubenswrapper[4875]: I1007 07:56:27.775136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:27 crc kubenswrapper[4875]: I1007 07:56:27.775163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:27 crc kubenswrapper[4875]: I1007 07:56:27.775174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:29 crc kubenswrapper[4875]: W1007 07:56:29.427647 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake 
timeout Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.427734 4875 trace.go:236] Trace[471114749]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 07:56:19.425) (total time: 10001ms): Oct 07 07:56:29 crc kubenswrapper[4875]: Trace[471114749]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (07:56:29.427) Oct 07 07:56:29 crc kubenswrapper[4875]: Trace[471114749]: [10.001714399s] [10.001714399s] END Oct 07 07:56:29 crc kubenswrapper[4875]: E1007 07:56:29.427755 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.557457 4875 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.557537 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.647438 4875 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.781929 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.783697 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2" exitCode=255 Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.783747 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2"} Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.783870 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.784555 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.784594 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.784604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.785207 4875 scope.go:117] "RemoveContainer" containerID="ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2" Oct 07 07:56:29 crc kubenswrapper[4875]: W1007 07:56:29.882216 4875 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.882312 4875 trace.go:236] Trace[475260252]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 07:56:19.878) (total time: 10003ms): Oct 07 07:56:29 crc kubenswrapper[4875]: Trace[475260252]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10003ms (07:56:29.882) Oct 07 07:56:29 crc kubenswrapper[4875]: Trace[475260252]: [10.003593037s] [10.003593037s] END Oct 07 07:56:29 crc kubenswrapper[4875]: E1007 07:56:29.882334 4875 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.975675 4875 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.975729 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.980605 4875 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 07:56:29 crc kubenswrapper[4875]: I1007 07:56:29.980660 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.792199 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.794437 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084"} Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.794606 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.795669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.795719 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:30 crc kubenswrapper[4875]: I1007 07:56:30.795735 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.191604 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.325796 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.796959 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.797045 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.797864 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.797984 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:31 crc kubenswrapper[4875]: I1007 07:56:31.798005 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:32 crc kubenswrapper[4875]: I1007 07:56:32.799727 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:32 crc kubenswrapper[4875]: I1007 07:56:32.801066 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:32 crc kubenswrapper[4875]: I1007 07:56:32.801112 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:32 crc kubenswrapper[4875]: I1007 07:56:32.801130 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:32 crc kubenswrapper[4875]: I1007 07:56:32.844476 4875 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.556832 4875 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 07 07:56:34 crc kubenswrapper[4875]: E1007 07:56:34.973253 4875 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.977964 4875 trace.go:236] Trace[209959304]: "Reflector ListAndWatch" 
name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 07:56:24.112) (total time: 10865ms): Oct 07 07:56:34 crc kubenswrapper[4875]: Trace[209959304]: ---"Objects listed" error: 10865ms (07:56:34.977) Oct 07 07:56:34 crc kubenswrapper[4875]: Trace[209959304]: [10.865873739s] [10.865873739s] END Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.978004 4875 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.978319 4875 trace.go:236] Trace[964824416]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 07:56:21.820) (total time: 13158ms): Oct 07 07:56:34 crc kubenswrapper[4875]: Trace[964824416]: ---"Objects listed" error: 13157ms (07:56:34.978) Oct 07 07:56:34 crc kubenswrapper[4875]: Trace[964824416]: [13.158204312s] [13.158204312s] END Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.978368 4875 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 07 07:56:34 crc kubenswrapper[4875]: I1007 07:56:34.978634 4875 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 07 07:56:34 crc kubenswrapper[4875]: E1007 07:56:34.979764 4875 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.642903 4875 apiserver.go:52] "Watching apiserver" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645030 4875 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645316 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645657 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645705 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645762 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645798 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.645798 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645826 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.645857 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.645894 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.645917 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.647805 4875 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648098 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648438 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648540 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648566 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648684 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.648941 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.649117 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.650683 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.651235 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.675981 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.677046 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683497 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683542 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683581 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683602 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") 
pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683624 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683645 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683666 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683684 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683706 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683730 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683755 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683779 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683800 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683821 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683843 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683889 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683915 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683939 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683976 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.683997 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684020 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684042 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684063 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684097 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684120 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684142 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684167 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684190 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684213 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684235 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684255 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684274 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684307 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: 
I1007 07:56:35.684329 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684362 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684383 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684408 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684430 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684450 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684472 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684494 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684514 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684536 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 
07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684567 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684589 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684612 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684652 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684678 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684700 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684722 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684743 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684767 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684791 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: 
\"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684814 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684836 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684861 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684921 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684947 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684970 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684991 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685012 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685035 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685059 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685083 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685108 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685132 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685154 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685178 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685203 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685228 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685251 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685273 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685296 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685317 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685339 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685362 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685386 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685407 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685429 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685450 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685473 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685494 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685517 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685538 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685561 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685582 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685603 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685626 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685649 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685671 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685705 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685727 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685748 4875 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685771 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685794 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685818 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685842 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685867 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685906 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685929 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685952 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685973 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 
07:56:35.685996 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686018 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686041 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686064 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686086 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686135 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686161 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686188 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686213 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686236 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 
07:56:35.686261 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686284 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686307 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686329 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686351 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686374 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686397 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686422 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686447 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686471 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc 
kubenswrapper[4875]: I1007 07:56:35.686495 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686524 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686547 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686572 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686594 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686617 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686642 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686665 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686686 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686709 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 
07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686733 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686757 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686781 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686805 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686834 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686858 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686897 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686922 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686947 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686972 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: 
\"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686995 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687021 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687046 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687072 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687094 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687119 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687144 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687175 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687200 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687222 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" 
(UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687254 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687281 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687303 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687346 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687372 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687396 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687419 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687444 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687469 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687493 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687518 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687542 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687655 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687689 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687715 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687760 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687788 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687814 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687842 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687905 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687944 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688011 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688037 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688061 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688085 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688110 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695917 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696009 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696055 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696098 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696301 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696362 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696398 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696434 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696470 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696501 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696535 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696571 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696648 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696688 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696723 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696843 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696920 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697013 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697084 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697121 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697543 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697658 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697698 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697726 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697778 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697814 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697869 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697919 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697953 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.698029 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.698068 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.684919 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.706665 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.706764 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.707129 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.707179 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.707479 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.707603 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.708347 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.708856 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.708949 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.709153 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.709353 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.709515 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.713282 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.713558 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.713754 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.713989 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.714475 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685030 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685040 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685056 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685052 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685200 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685252 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685421 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685432 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685594 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685608 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.685921 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686087 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686141 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686716 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686743 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686892 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.686929 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687099 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687232 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687299 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687312 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687553 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687565 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687715 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.687728 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.688040 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.694845 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.694984 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.694802 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695246 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695519 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695541 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695694 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.695784 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696187 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696451 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.696539 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697020 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697364 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697575 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697596 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.697759 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.698354 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.699595 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.699599 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.699945 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.701283 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.701589 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.701600 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.702106 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.702694 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.703055 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.703217 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.703189 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.704628 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705310 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705403 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705652 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705729 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705780 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.705958 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.706196 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.706244 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.706422 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.714926 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.714967 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.715040 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.715294 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.715707 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716178 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716237 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716318 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716468 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716726 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716847 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716861 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717002 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717328 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.716422 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717349 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717030 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717170 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717184 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717423 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717473 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717732 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717779 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717779 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717901 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717964 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717964 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718240 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718280 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718360 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717385 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718414 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718470 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717567 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718662 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.717993 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718851 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.718511 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719151 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719187 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719184 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719404 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719426 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719486 4875 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719549 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719560 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719615 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719968 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719705 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.720037 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.720275 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.720654 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.720668 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.719033 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.720408 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.720361 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.721312 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.721454 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:36.220845257 +0000 UTC m=+21.180615800 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.726098 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.721470 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.721221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.721802 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.726280 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.722219 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.722347 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.722546 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.722676 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.726364 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:36.226293452 +0000 UTC m=+21.186064005 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.722700 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.723402 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.723679 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.723688 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.724274 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.724309 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.724671 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.724983 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.725853 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.727605 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.728269 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.728409 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.728521 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.728796 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.728997 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.729140 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.729452 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.729533 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.729763 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.729825 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.730124 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.730142 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.730389 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.730565 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.730614 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.731143 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.731251 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:56:36.2312332 +0000 UTC m=+21.191003743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.731576 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.731803 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.731838 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.732184 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.732473 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.732662 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.732931 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.733193 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.733288 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.733690 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.733984 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.734332 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.734426 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.734807 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.735183 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.735215 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.735686 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.736298 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.736521 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.736962 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.736988 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.737005 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.737005 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.737073 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.737078 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:36.237060077 +0000 UTC m=+21.196830620 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.737236 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.738000 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.738085 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.743242 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.743278 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.743296 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:35 crc kubenswrapper[4875]: E1007 07:56:35.743362 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:36.243339379 +0000 UTC m=+21.203109922 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.743562 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.743914 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.744732 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.744756 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.745692 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.747210 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.747827 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.748211 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.751061 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.752975 4875 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.754004 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.754299 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.754996 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.755107 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.758013 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.758221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.759027 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.759693 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.761423 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.762152 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.763058 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.763449 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.764509 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.765094 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.765437 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.766476 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.767080 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.768500 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.769293 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.769606 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.769825 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.771136 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.771712 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.772811 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.773481 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.773842 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.774663 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.775255 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.776436 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.776431 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.777030 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.777515 4875 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.777611 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.779830 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.780548 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.781709 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.783314 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.784286 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.785493 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" 
path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.786282 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.787555 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.788137 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.788834 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.789118 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.789900 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.790938 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.791743 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.792294 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.793242 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.793942 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.794756 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.795232 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.796059 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.796559 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.797195 4875 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798098 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798619 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798638 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798817 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798820 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798923 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.798985 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799199 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799272 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799319 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799337 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799354 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" 
(UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799367 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799378 4875 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799393 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799407 4875 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799419 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799431 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799443 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799454 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799466 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799478 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799489 4875 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799501 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799513 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: 
\"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799525 4875 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799536 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799549 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799563 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799574 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799584 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799596 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799607 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799620 4875 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799632 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799644 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799656 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799698 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799713 4875 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799724 4875 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799736 4875 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799747 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799765 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799776 4875 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799787 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799798 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799810 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799822 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799836 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799849 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799865 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799897 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799909 4875 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799920 4875 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799931 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799945 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799986 4875 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.799998 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800009 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800022 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800034 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800045 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800057 4875 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800067 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800080 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800091 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800102 4875 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800113 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800126 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800138 4875 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800149 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800160 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800174 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800187 4875 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800199 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800210 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800253 4875 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800266 4875 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800279 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800310 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800326 4875 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800512 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800526 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800537 4875 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800550 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800561 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800572 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800555 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800583 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800597 4875 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800608 4875 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800620 4875 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800632 4875 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 
07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800643 4875 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800653 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800665 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800677 4875 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800688 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800700 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800711 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800723 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800737 4875 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800775 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800787 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800798 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800811 4875 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc 
kubenswrapper[4875]: I1007 07:56:35.800824 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800835 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800866 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800890 4875 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800902 4875 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800912 4875 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800923 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800935 4875 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800946 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800959 4875 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800970 4875 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800980 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.800992 4875 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 
07:56:35.801004 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801088 4875 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801099 4875 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801110 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801120 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801133 4875 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801145 4875 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801156 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801166 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801177 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801188 4875 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801200 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801228 4875 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc 
kubenswrapper[4875]: I1007 07:56:35.801244 4875 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801256 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801269 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801280 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801292 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801302 4875 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801315 4875 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801326 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801336 4875 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801346 4875 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801357 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801369 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801380 4875 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801392 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801403 4875 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801414 4875 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801425 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801438 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801450 4875 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801461 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801473 4875 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801484 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801496 4875 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801510 4875 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801521 4875 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801532 4875 reconciler_common.go:293] "Volume detached for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801544 4875 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801577 4875 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801608 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801622 4875 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801632 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801650 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801661 4875 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801672 4875 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801683 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801693 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801704 4875 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801716 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801728 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801741 4875 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801755 4875 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801766 4875 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801778 4875 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801790 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801801 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801814 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801825 4875 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801838 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801849 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801860 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801871 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801904 4875 reconciler_common.go:293] "Volume detached for volume 
\"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801916 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801926 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801938 4875 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801949 4875 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801961 4875 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801972 4875 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801982 4875 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.801992 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802003 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802014 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802026 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802040 4875 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802052 4875 
reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802064 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802075 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802085 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802095 4875 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.802107 4875 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.811948 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.829561 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd
53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.840280 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.848750 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.860136 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.871214 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.880693 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.893242 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.958754 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.965102 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 07:56:35 crc kubenswrapper[4875]: I1007 07:56:35.972358 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 07:56:35 crc kubenswrapper[4875]: W1007 07:56:35.977360 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-c0036f4ef5db29f8c44bebe88f80bf9537885a31d800d83279d2451ae89f9d8d WatchSource:0}: Error finding container c0036f4ef5db29f8c44bebe88f80bf9537885a31d800d83279d2451ae89f9d8d: Status 404 returned error can't find the container with id c0036f4ef5db29f8c44bebe88f80bf9537885a31d800d83279d2451ae89f9d8d Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.305147 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.305503 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.305528 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.305551 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.305585 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.305663 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.305709 4875 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:37.30569619 +0000 UTC m=+22.265466733 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306069 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:56:37.306060102 +0000 UTC m=+22.265830645 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306181 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306218 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306228 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306253 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:37.306244528 +0000 UTC m=+22.266015071 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306314 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306323 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306330 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306349 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:37.306343211 +0000 UTC m=+22.266113744 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306416 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.306488 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:37.306481675 +0000 UTC m=+22.266252218 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.561409 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.565682 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.571923 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.582810 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.592809 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.606550 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.608572 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.627378 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59b
c7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.640414 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.649065 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.659518 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.673618 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.686024 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.696769 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:36 crc kubenswrapper[4875]: E1007 07:56:36.696898 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.702565 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.736595 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.764990 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"starte
d\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.800457 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.809839 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.809923 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f581a39d61bab0e394b3d93cc6bfeec0cb992dbf3e6db3d3b0a3fa4cf3106109"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.811297 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.811703 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.813154 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084" exitCode=255 Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.813224 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.813288 4875 scope.go:117] "RemoveContainer" containerID="ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.814245 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f15f782ad10776f5af5347e71ed9856134e689a2c17004dded0ff3361c9d3fde"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.815192 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.816139 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.816162 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.816171 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c0036f4ef5db29f8c44bebe88f80bf9537885a31d800d83279d2451ae89f9d8d"} Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.830013 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.850125 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.861546 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.873452 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.888529 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.901795 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.920541 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.938731 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.951411 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.961622 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.971382 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.987065 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:36 crc kubenswrapper[4875]: I1007 07:56:36.998990 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722
264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:36Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.005210 4875 scope.go:117] "RemoveContainer" containerID="671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.005383 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.005661 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.023053 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.036339 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.049621 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.073338 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wc2jq"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.073916 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-zk2kz"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.074138 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.074679 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-9tw9m"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.074843 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.075030 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.080656 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-hx68m"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.080933 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.081574 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.084630 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.084634 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.084680 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.084833 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.086142 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.086399 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.086463 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.086462 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.087950 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.088286 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.088282 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.088451 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.088711 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.091231 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.098458 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.128665 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:29Z\\\",\\\"message\\\":\\\"W1007 07:56:18.835173 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 07:56:18.835622 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759823778 cert, and key in /tmp/serving-cert-1609550910/serving-signer.crt, /tmp/serving-cert-1609550910/serving-signer.key\\\\nI1007 07:56:19.144033 1 observer_polling.go:159] Starting file observer\\\\nW1007 07:56:19.147193 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 07:56:19.147327 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:19.149025 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1609550910/tls.crt::/tmp/serving-cert-1609550910/tls.key\\\\\\\"\\\\nF1007 07:56:29.310074 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for 
RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"star
tTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.139126 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.164173 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.211977 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213184 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-cnibin\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213221 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-multus-daemon-config\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213236 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-etc-kubernetes\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213258 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213280 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-os-release\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213295 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-netns\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213313 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-socket-dir-parent\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213329 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-multus\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213345 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3928c10c-c3da-41eb-96b2-629d67cfb31f-rootfs\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213366 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213385 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-conf-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213404 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4f6\" (UniqueName: \"kubernetes.io/projected/3928c10c-c3da-41eb-96b2-629d67cfb31f-kube-api-access-qd4f6\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213421 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-cni-binary-copy\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213439 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f8b02a8a-b8d2-4097-b768-132e4c46938a-hosts-file\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213459 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjttk\" (UniqueName: \"kubernetes.io/projected/f8b02a8a-b8d2-4097-b768-132e4c46938a-kube-api-access-rjttk\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213475 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-hostroot\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213490 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s782\" (UniqueName: \"kubernetes.io/projected/e5a790e1-c591-4cfc-930f-4805a923790b-kube-api-access-4s782\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213504 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-system-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213518 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-os-release\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213557 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213630 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-multus-certs\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213704 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-bin\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213728 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213746 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3928c10c-c3da-41eb-96b2-629d67cfb31f-proxy-tls\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213773 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-system-cni-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213788 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q24bq\" (UniqueName: \"kubernetes.io/projected/7021fc56-b485-4ca6-80e8-56665ade004f-kube-api-access-q24bq\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213814 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-cnibin\") pod 
\"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213854 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-k8s-cni-cncf-io\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213917 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3928c10c-c3da-41eb-96b2-629d67cfb31f-mcd-auth-proxy-config\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.213941 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-kubelet\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.242956 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.262350 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.293018 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.311113 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314277 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314341 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-cni-binary-copy\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314362 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f8b02a8a-b8d2-4097-b768-132e4c46938a-hosts-file\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314382 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjttk\" (UniqueName: \"kubernetes.io/projected/f8b02a8a-b8d2-4097-b768-132e4c46938a-kube-api-access-rjttk\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314400 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-hostroot\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314418 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s782\" (UniqueName: \"kubernetes.io/projected/e5a790e1-c591-4cfc-930f-4805a923790b-kube-api-access-4s782\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314438 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-system-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" 
Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314498 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-system-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314500 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-hostroot\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.314515 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:56:39.314479513 +0000 UTC m=+24.274250186 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314687 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f8b02a8a-b8d2-4097-b768-132e4c46938a-hosts-file\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314688 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314735 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-os-release\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.314770 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.314788 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.314799 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.314837 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:39.314823084 +0000 UTC m=+24.274593627 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314926 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314959 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314977 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.314998 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-multus-certs\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315021 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-bin\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315042 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315060 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/3928c10c-c3da-41eb-96b2-629d67cfb31f-proxy-tls\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315077 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-k8s-cni-cncf-io\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315096 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315113 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-system-cni-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315124 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-bin\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315130 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q24bq\" (UniqueName: \"kubernetes.io/projected/7021fc56-b485-4ca6-80e8-56665ade004f-kube-api-access-q24bq\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.315136 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315156 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-cnibin\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.315170 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315175 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-kubelet\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.315190 4875 projected.go:194] Error preparing data for 
projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.315261 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:39.315236087 +0000 UTC m=+24.275006800 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315196 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3928c10c-c3da-41eb-96b2-629d67cfb31f-mcd-auth-proxy-config\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315328 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-cnibin\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315366 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-multus-daemon-config\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315397 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-etc-kubernetes\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315391 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-os-release\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315428 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315500 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-os-release\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315536 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-netns\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315568 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-socket-dir-parent\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315596 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-multus\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315625 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3928c10c-c3da-41eb-96b2-629d67cfb31f-rootfs\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315658 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315684 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-conf-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315682 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-cnibin\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315716 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4f6\" (UniqueName: \"kubernetes.io/projected/3928c10c-c3da-41eb-96b2-629d67cfb31f-kube-api-access-qd4f6\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315859 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3928c10c-c3da-41eb-96b2-629d67cfb31f-mcd-auth-proxy-config\") pod 
\"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315920 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-multus-certs\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315958 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-cnibin\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.315979 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-kubelet\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316005 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-var-lib-cni-multus\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316030 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3928c10c-c3da-41eb-96b2-629d67cfb31f-rootfs\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316052 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-os-release\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316067 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316080 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7021fc56-b485-4ca6-80e8-56665ade004f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316081 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-netns\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: 
I1007 07:56:37.316109 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-socket-dir-parent\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.316140 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316144 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-conf-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316162 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-host-run-k8s-cni-cncf-io\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316170 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-etc-kubernetes\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316196 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e5a790e1-c591-4cfc-930f-4805a923790b-multus-cni-dir\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.316193 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:39.316181317 +0000 UTC m=+24.275951860 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.316236 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316274 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-multus-daemon-config\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316144 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-system-cni-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.316291 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:39.3162768 +0000 UTC m=+24.276047343 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316327 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7021fc56-b485-4ca6-80e8-56665ade004f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.316612 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e5a790e1-c591-4cfc-930f-4805a923790b-cni-binary-copy\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.324649 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3928c10c-c3da-41eb-96b2-629d67cfb31f-proxy-tls\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.329913 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.334582 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4f6\" (UniqueName: \"kubernetes.io/projected/3928c10c-c3da-41eb-96b2-629d67cfb31f-kube-api-access-qd4f6\") pod \"machine-config-daemon-hx68m\" (UID: \"3928c10c-c3da-41eb-96b2-629d67cfb31f\") " pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.339702 
4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s782\" (UniqueName: \"kubernetes.io/projected/e5a790e1-c591-4cfc-930f-4805a923790b-kube-api-access-4s782\") pod \"multus-wc2jq\" (UID: \"e5a790e1-c591-4cfc-930f-4805a923790b\") " pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.342306 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q24bq\" (UniqueName: \"kubernetes.io/projected/7021fc56-b485-4ca6-80e8-56665ade004f-kube-api-access-q24bq\") pod \"multus-additional-cni-plugins-zk2kz\" (UID: \"7021fc56-b485-4ca6-80e8-56665ade004f\") " pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.346514 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjttk\" (UniqueName: \"kubernetes.io/projected/f8b02a8a-b8d2-4097-b768-132e4c46938a-kube-api-access-rjttk\") pod \"node-resolver-9tw9m\" (UID: \"f8b02a8a-b8d2-4097-b768-132e4c46938a\") " pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.348327 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.371799 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.388029 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.388405 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-
cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:29Z\\\",\\\"message\\\":\\\"W1007 07:56:18.835173 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 07:56:18.835622 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759823778 cert, and key in /tmp/serving-cert-1609550910/serving-signer.crt, /tmp/serving-cert-1609550910/serving-signer.key\\\\nI1007 07:56:19.144033 1 observer_polling.go:159] Starting file observer\\\\nW1007 07:56:19.147193 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 07:56:19.147327 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:19.149025 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1609550910/tls.crt::/tmp/serving-cert-1609550910/tls.key\\\\\\\"\\\\nF1007 07:56:29.310074 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 
07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.395997 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-9tw9m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.404004 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wc2jq" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.410990 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.413171 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: W1007 07:56:37.416473 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7021fc56_b485_4ca6_80e8_56665ade004f.slice/crio-f02de939f0ed8a8e2e26bca2757f6cdca92790ebd3300f0908ed1b3cd3cc1c7b WatchSource:0}: Error finding container f02de939f0ed8a8e2e26bca2757f6cdca92790ebd3300f0908ed1b3cd3cc1c7b: Status 404 returned error can't find the container with id f02de939f0ed8a8e2e26bca2757f6cdca92790ebd3300f0908ed1b3cd3cc1c7b Oct 07 07:56:37 crc kubenswrapper[4875]: W1007 07:56:37.430142 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5a790e1_c591_4cfc_930f_4805a923790b.slice/crio-a76bb2409e1b02b1590f6ff0cc5498f1e03bb5ba2e45e5c952b50ca895fca636 WatchSource:0}: Error finding container a76bb2409e1b02b1590f6ff0cc5498f1e03bb5ba2e45e5c952b50ca895fca636: Status 404 returned error can't find the container with id a76bb2409e1b02b1590f6ff0cc5498f1e03bb5ba2e45e5c952b50ca895fca636 Oct 07 07:56:37 crc kubenswrapper[4875]: W1007 07:56:37.435171 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3928c10c_c3da_41eb_96b2_629d67cfb31f.slice/crio-a519560d46e1eed7a1ef2830b01d938a7a9179dc4300cbb59df0dc1ef42f27cc WatchSource:0}: Error finding container a519560d46e1eed7a1ef2830b01d938a7a9179dc4300cbb59df0dc1ef42f27cc: Status 404 returned error can't find the container with id a519560d46e1eed7a1ef2830b01d938a7a9179dc4300cbb59df0dc1ef42f27cc Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.444032 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.454498 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8tcxj"] Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.456475 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.461375 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.461475 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.461392 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.461862 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.462558 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.469419 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.469419 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.472344 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.495362 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.515705 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is 
after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.517860 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.517918 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.517936 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.517958 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.517974 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518053 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518091 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518113 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518147 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch\") pod 
\"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518163 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518233 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518251 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518270 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518284 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518302 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq886\" (UniqueName: \"kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518339 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518358 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518372 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518398 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.518428 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.531144 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.551002 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.569727 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.593158 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.608010 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620085 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620151 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620176 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq886\" (UniqueName: \"kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620212 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620234 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620307 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620333 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620350 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620368 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620386 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620401 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620417 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620431 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620449 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620467 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620487 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620539 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620554 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620569 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.620667 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621130 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621163 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621200 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621262 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621534 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621585 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621632 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621663 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621721 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621745 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621751 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.621787 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.622802 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.623379 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.626065 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.640366 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq886\" (UniqueName: \"kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886\") pod \"ovnkube-node-8tcxj\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.648728 4875 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.673110 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.698552 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.698665 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.698752 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.698803 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.702790 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.703450 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.725338 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.745264 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed3180f0badc58d1421537275fe9a04a086a483149f54192c9d8eae4b4c3dea2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:29Z\\\",\\\"message\\\":\\\"W1007 07:56:18.835173 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
07:56:18.835622 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759823778 cert, and key in /tmp/serving-cert-1609550910/serving-signer.crt, /tmp/serving-cert-1609550910/serving-signer.key\\\\nI1007 07:56:19.144033 1 observer_polling.go:159] Starting file observer\\\\nW1007 07:56:19.147193 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 07:56:19.147327 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:19.149025 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1609550910/tls.crt::/tmp/serving-cert-1609550910/tls.key\\\\\\\"\\\\nF1007 07:56:29.310074 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.765587 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.772975 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:37 crc kubenswrapper[4875]: W1007 07:56:37.790182 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7806e48_48e7_4680_af2e_e93a05003370.slice/crio-d61778870f0064b780d9a5025459e730041a035bf352302f1c89a14da6660fee WatchSource:0}: Error finding container d61778870f0064b780d9a5025459e730041a035bf352302f1c89a14da6660fee: Status 404 returned error can't find the container with id d61778870f0064b780d9a5025459e730041a035bf352302f1c89a14da6660fee Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.791212 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.806632 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.820590 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerStarted","Data":"0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.820661 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerStarted","Data":"a76bb2409e1b02b1590f6ff0cc5498f1e03bb5ba2e45e5c952b50ca895fca636"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.825226 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.833274 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.833329 4875 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.833343 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"a519560d46e1eed7a1ef2830b01d938a7a9179dc4300cbb59df0dc1ef42f27cc"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.835422 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9tw9m" event={"ID":"f8b02a8a-b8d2-4097-b768-132e4c46938a","Type":"ContainerStarted","Data":"2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.835474 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9tw9m" event={"ID":"f8b02a8a-b8d2-4097-b768-132e4c46938a","Type":"ContainerStarted","Data":"82d6895fe3bc7d82dc07fee9cb6d6edb81ec251486345fdde0d9264a7feaaa5d"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.838917 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerStarted","Data":"3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.838959 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerStarted","Data":"f02de939f0ed8a8e2e26bca2757f6cdca92790ebd3300f0908ed1b3cd3cc1c7b"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.841047 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.843160 4875 scope.go:117] "RemoveContainer" containerID="671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084" Oct 07 07:56:37 crc kubenswrapper[4875]: E1007 07:56:37.843345 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.847516 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.848338 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"d61778870f0064b780d9a5025459e730041a035bf352302f1c89a14da6660fee"} Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.876215 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.919985 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.962300 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:37 crc kubenswrapper[4875]: I1007 07:56:37.997972 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.040347 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.075694 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.117754 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.156722 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.196081 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.245634 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.275919 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.312361 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.364810 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.394534 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.437475 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.474612 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.517565 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.696527 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:38 crc kubenswrapper[4875]: E1007 07:56:38.696683 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.852030 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3"} Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.853534 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" exitCode=0 Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.853616 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.855426 4875 generic.go:334] "Generic (PLEG): container finished" podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752" exitCode=0 Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.855493 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752"} Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.878649 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.888293 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.903689 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.917771 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.931766 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.959521 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.973070 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:38 crc kubenswrapper[4875]: I1007 07:56:38.997688 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.010967 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.023397 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.033562 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.045121 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.055734 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.074588 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.112495 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.154818 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc 
kubenswrapper[4875]: I1007 07:56:39.193741 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.238791 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name
\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae5
9bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.277187 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.317823 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.338108 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.338220 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.338252 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.338274 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338323 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:56:43.338294179 +0000 UTC m=+28.298064722 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.338370 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338379 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338491 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:43.338481975 +0000 UTC m=+28.298252518 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338489 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338514 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338527 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338578 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:43.338561308 +0000 UTC m=+28.298331931 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338445 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338603 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338627 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338654 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:43.33864532 +0000 UTC m=+28.298415983 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338692 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.338719 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:43.338710723 +0000 UTC m=+28.298481386 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.353679 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.393702 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.438363 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.475893 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.514522 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.564806 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.592982 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.635210 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.697025 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.697092 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.697150 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.697230 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.717370 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ddx6l"] Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.717959 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: W1007 07:56:39.720158 4875 reflector.go:561] object-"openshift-image-registry"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.720310 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:56:39 crc kubenswrapper[4875]: W1007 07:56:39.720241 4875 reflector.go:561] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": failed to list *v1.Secret: secrets "node-ca-dockercfg-4777p" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.720345 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"node-ca-dockercfg-4777p\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-ca-dockercfg-4777p\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:56:39 
crc kubenswrapper[4875]: W1007 07:56:39.720523 4875 reflector.go:561] object-"openshift-image-registry"/"image-registry-certificates": failed to list *v1.ConfigMap: configmaps "image-registry-certificates" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Oct 07 07:56:39 crc kubenswrapper[4875]: E1007 07:56:39.720549 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"image-registry-certificates\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"image-registry-certificates\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.725790 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.753822 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.796153 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc 
kubenswrapper[4875]: I1007 07:56:39.837424 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.841688 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9bwf\" (UniqueName: \"kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.841781 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0605d52-0cfc-4bcf-9218-1991257047cd-host\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.841809 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.871704 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerStarted","Data":"ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.874791 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.874810 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" 
event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.874819 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.874827 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.874835 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.891035 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.912796 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.942544 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0605d52-0cfc-4bcf-9218-1991257047cd-host\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.942622 4875 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.942655 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0605d52-0cfc-4bcf-9218-1991257047cd-host\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.942669 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9bwf\" (UniqueName: \"kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:39 crc kubenswrapper[4875]: I1007 07:56:39.956646 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:39Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.012795 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.054314 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.091653 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.136356 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.177364 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.214276 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.253444 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.299707 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.332223 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.374817 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.411911 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.455727 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.491892 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.535334 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.572380 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.613019 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.650476 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.693057 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.697174 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.697287 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.735437 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.772720 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.820434 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.854271 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.879500 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.880990 4875 generic.go:334] "Generic (PLEG): container finished" 
podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f" exitCode=0 Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.881019 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f"} Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.897600 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.937422 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.943993 4875 configmap.go:193] Couldn't get configMap openshift-image-registry/image-registry-certificates: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.944069 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca podName:d0605d52-0cfc-4bcf-9218-1991257047cd nodeName:}" failed. No retries permitted until 2025-10-07 07:56:41.444051499 +0000 UTC m=+26.403822042 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serviceca" (UniqueName: "kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca") pod "node-ca-ddx6l" (UID: "d0605d52-0cfc-4bcf-9218-1991257047cd") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.974269 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:40Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.977951 4875 projected.go:288] Couldn't get configMap openshift-image-registry/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.978011 4875 projected.go:194] Error preparing data for projected volume kube-api-access-l9bwf for pod openshift-image-registry/node-ca-ddx6l: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:56:40 crc kubenswrapper[4875]: E1007 07:56:40.978094 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf podName:d0605d52-0cfc-4bcf-9218-1991257047cd nodeName:}" failed. No retries permitted until 2025-10-07 07:56:41.478064883 +0000 UTC m=+26.437835436 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-l9bwf" (UniqueName: "kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf") pod "node-ca-ddx6l" (UID: "d0605d52-0cfc-4bcf-9218-1991257047cd") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:56:40 crc kubenswrapper[4875]: I1007 07:56:40.984218 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.033278 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.044768 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.094067 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.134651 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.172613 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.214819 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.252116 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.264863 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.313377 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.353511 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.380260 4875 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.383078 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.383119 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.383129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.383282 4875 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.400086 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.446042 4875 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.446440 4875 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.447896 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.447943 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.447958 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.447976 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.447988 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.458621 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.460030 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d0605d52-0cfc-4bcf-9218-1991257047cd-serviceca\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.463577 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.468079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.468119 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.468135 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.468159 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.468175 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.485871 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.485873 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.489992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.490054 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.490067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.490086 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.490098 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.506026 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.509516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.509553 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.509562 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.509576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.509587 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.520616 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 
2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.542720 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.550345 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.550394 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.550405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.550420 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.550430 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.555081 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.559444 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9bwf\" (UniqueName: \"kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.562929 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.563053 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.564924 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.564947 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.564955 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.564970 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.564979 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.571533 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9bwf\" (UniqueName: \"kubernetes.io/projected/d0605d52-0cfc-4bcf-9218-1991257047cd-kube-api-access-l9bwf\") pod \"node-ca-ddx6l\" (UID: \"d0605d52-0cfc-4bcf-9218-1991257047cd\") " pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.598213 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.634172 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.667101 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.667146 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.667157 4875 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.667173 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.667182 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.696779 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.696803 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.696911 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:41 crc kubenswrapper[4875]: E1007 07:56:41.696991 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.769779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.769818 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.769826 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.769840 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.769849 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.841942 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-ddx6l" Oct 07 07:56:41 crc kubenswrapper[4875]: W1007 07:56:41.858251 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0605d52_0cfc_4bcf_9218_1991257047cd.slice/crio-a49b10084d37e35bf1d25b96a7accb66b6dc81e10031ea109afe3b768454991a WatchSource:0}: Error finding container a49b10084d37e35bf1d25b96a7accb66b6dc81e10031ea109afe3b768454991a: Status 404 returned error can't find the container with id a49b10084d37e35bf1d25b96a7accb66b6dc81e10031ea109afe3b768454991a Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.872644 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.872693 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.872712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.872735 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.872753 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.889985 4875 generic.go:334] "Generic (PLEG): container finished" podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf" exitCode=0 Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.890081 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.896011 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ddx6l" event={"ID":"d0605d52-0cfc-4bcf-9218-1991257047cd","Type":"ContainerStarted","Data":"a49b10084d37e35bf1d25b96a7accb66b6dc81e10031ea109afe3b768454991a"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.905683 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.920998 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.936737 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.954481 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.966987 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.975741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.975774 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.975784 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.975797 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.975805 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:41Z","lastTransitionTime":"2025-10-07T07:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.980868 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:41 crc kubenswrapper[4875]: I1007 07:56:41.992552 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:41Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.001636 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.013455 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.035199 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.074235 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.077660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.077681 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.077689 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.077702 4875 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.077711 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.119791 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756
d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.155603 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.180313 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.180374 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.180390 4875 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.180408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.180454 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.201902 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.235334 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.282983 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.283020 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.283030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.283044 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.283054 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.384725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.384761 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.384769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.384801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.384810 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.487007 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.487049 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.487058 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.487072 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.487081 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.589615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.589645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.589653 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.589666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.589675 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.691783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.691829 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.691840 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.691856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.691892 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.697031 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:42 crc kubenswrapper[4875]: E1007 07:56:42.697152 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.794410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.794448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.794459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.794475 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.794486 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.896822 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.896933 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.896945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.896961 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.896972 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:42Z","lastTransitionTime":"2025-10-07T07:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.900951 4875 generic.go:334] "Generic (PLEG): container finished" podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669" exitCode=0 Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.901006 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.903029 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ddx6l" event={"ID":"d0605d52-0cfc-4bcf-9218-1991257047cd","Type":"ContainerStarted","Data":"bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.918523 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.919049 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.948649 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.963414 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.976572 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:42 crc kubenswrapper[4875]: I1007 07:56:42.993902 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:42Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.004082 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.004127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.004139 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.004157 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.004172 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.009675 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.023330 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.033219 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.052304 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.067394 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.086848 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.105290 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.107198 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.107243 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.107254 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.107273 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.107284 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.119496 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.139505 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.161660 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.179296 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.195608 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.210921 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.211001 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.211016 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.211053 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.211069 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.215245 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5
f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.227901 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.243229 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/opens
hift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.260828 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.276321 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.291019 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.305838 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.313692 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.313755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.313774 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.313800 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.313815 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.320215 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.331569 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.348309 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\
\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.362949 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.377873 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.378032 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.378079 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.378107 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.378130 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378249 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378303 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.378287152 +0000 UTC m=+36.338057695 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378577 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.378566361 +0000 UTC m=+36.338336904 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378622 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378649 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.378642683 +0000 UTC m=+36.338413226 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378672 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378690 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378704 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378736 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.378726266 +0000 UTC m=+36.338496809 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378656 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.378989 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.379004 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.379100 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.379080676 +0000 UTC m=+36.338851219 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.391560 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.416493 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.416534 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.416544 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.416559 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.416568 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.432175 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.518450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.518618 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.518635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.518649 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.518660 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.621125 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.621176 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.621187 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.621204 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.621215 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.697269 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.697281 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.697406 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:43 crc kubenswrapper[4875]: E1007 07:56:43.697571 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.723061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.723095 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.723107 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.723123 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.723134 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.825413 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.825455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.825466 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.825480 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.825491 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.924248 4875 generic.go:334] "Generic (PLEG): container finished" podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2" exitCode=0 Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.924330 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.926951 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.927002 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.927013 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.927032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.927044 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:43Z","lastTransitionTime":"2025-10-07T07:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.943128 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.961538 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.972528 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:43 crc kubenswrapper[4875]: I1007 07:56:43.984373 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.002244 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.013300 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.028722 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.029533 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.029551 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.029558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.029592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.029604 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.039705 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.050086 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.059216 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.072025 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.081486 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.093480 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.105097 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.113719 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.131372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.131430 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.131446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.131472 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.131485 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.233716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.233757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.233769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.233785 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.233796 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.339048 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.339523 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.339537 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.339556 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.339572 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.443424 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.443500 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.443515 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.443537 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.443551 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.546016 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.546087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.546100 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.546117 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.546131 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.648845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.648916 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.648926 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.648942 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.648954 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.696746 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:44 crc kubenswrapper[4875]: E1007 07:56:44.696900 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.751476 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.751588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.751609 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.751642 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.751661 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.855028 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.855084 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.855100 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.855122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.855134 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.932468 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.932754 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.932926 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.932980 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.938540 4875 generic.go:334] "Generic (PLEG): container finished" podID="7021fc56-b485-4ca6-80e8-56665ade004f" containerID="bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543" exitCode=0 Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.938577 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerDied","Data":"bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.948800 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.957977 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.958006 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.958015 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.958030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.958040 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:44Z","lastTransitionTime":"2025-10-07T07:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.960355 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.970454 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.983811 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:44 crc kubenswrapper[4875]: I1007 07:56:44.996092 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:44Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.012053 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.021482 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.029671 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.030485 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.041750 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.053440 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.059708 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.059734 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.059742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.059755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.059764 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.066822 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.085438 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d74
62\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.097918 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.110666 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.123029 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.138972 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.153014 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.162676 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.162724 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.162738 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.162759 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.162774 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.172481 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.184015 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.187159 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.187711 4875 scope.go:117] "RemoveContainer" containerID="671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084" Oct 07 07:56:45 crc kubenswrapper[4875]: E1007 07:56:45.187846 4875 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.199955 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-c
ert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"moun
tPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.209340 4875 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.220391 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.232030 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.244154 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.257289 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.264944 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.264975 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.264984 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.264997 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.265006 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.269310 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.281953 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.292926 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.301618 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.312329 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.326352 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07
a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.367166 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.367209 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.367221 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.367238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.367248 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.470087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.470114 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.470123 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.470136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.470145 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.573250 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.573297 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.573311 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.573335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.573350 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.675232 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.675269 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.675280 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.675295 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.675306 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.697091 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:45 crc kubenswrapper[4875]: E1007 07:56:45.697221 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.697101 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:45 crc kubenswrapper[4875]: E1007 07:56:45.697349 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.709509 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.722510 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.740325 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca89775
0ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.750225 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.761090 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.777822 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.777856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.777867 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.777902 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.777915 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.784278 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.795845 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.813097 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.849413 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.872340 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.880397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.880434 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.880443 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.880457 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.880466 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.883241 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.896300 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.908230 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.920941 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.931377 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.944917 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" event={"ID":"7021fc56-b485-4ca6-80e8-56665ade004f","Type":"ContainerStarted","Data":"ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.956363 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.969226 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.982396 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.982426 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.982484 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.982500 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.982510 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:45Z","lastTransitionTime":"2025-10-07T07:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.983264 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:45 crc kubenswrapper[4875]: I1007 07:56:45.993294 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.031959 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.072852 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.084288 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.084330 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.084339 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.084353 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 
07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.084362 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.113334 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.158556 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.186912 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.186963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.186975 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.186992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.187005 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.196605 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.235393 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.279346 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.289516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.289576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.289587 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.289604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.289616 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.314320 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.358381 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.390786 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.392209 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.392240 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.392253 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.392269 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.392280 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.437615 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:46Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.494560 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.494600 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.494609 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.494625 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.494636 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.597698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.597736 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.597747 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.597764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.597782 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.697219 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:46 crc kubenswrapper[4875]: E1007 07:56:46.697354 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.699661 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.699683 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.699692 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.699703 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.699713 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.802508 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.802549 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.802561 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.802577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.802588 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.905516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.905551 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.905562 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.905577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:46 crc kubenswrapper[4875]: I1007 07:56:46.905586 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:46Z","lastTransitionTime":"2025-10-07T07:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.008602 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.008679 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.008698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.008732 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.008756 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.111410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.111450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.111462 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.111480 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.111491 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.213346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.213377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.213384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.213396 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.213405 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.315942 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.316000 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.316017 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.316041 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.316059 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.418340 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.418384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.418394 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.418410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.418422 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.520845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.521134 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.521237 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.521305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.521363 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.624629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.624753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.624778 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.624817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.624840 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.697594 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.697664 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:47 crc kubenswrapper[4875]: E1007 07:56:47.697760 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:47 crc kubenswrapper[4875]: E1007 07:56:47.698011 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.728986 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.729071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.729100 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.729135 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.729162 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.831391 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.831428 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.831438 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.831450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.831460 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.933810 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.933856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.933869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.933903 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.933918 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:47Z","lastTransitionTime":"2025-10-07T07:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.953482 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/0.log" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.956653 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a" exitCode=1 Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.956726 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a"} Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.957667 4875 scope.go:117] "RemoveContainer" containerID="f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.979103 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:47Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:47 crc kubenswrapper[4875]: I1007 07:56:47.994723 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:47Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.015282 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.037389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.037452 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.037472 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.037500 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.037525 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.038029 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d6
1663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\
\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.057068 4875 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca69
6829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.084433 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39
a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.107209 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.134696 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 
7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.141400 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.141438 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.141486 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.141511 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.141529 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.149994 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.172799 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.191643 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.209197 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.223968 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.239525 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.245159 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.245242 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.245261 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.245286 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.245300 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.256278 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.348377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.348462 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.348482 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.348509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.348526 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.452779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.452828 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.452840 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.452864 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.452888 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.551523 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j"] Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.552931 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.560858 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.563481 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.564310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.564384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.564422 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.564446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.564462 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.580681 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.593477 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.614833 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.636475 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.653831 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.660762 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7bd9\" (UniqueName: \"kubernetes.io/projected/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-kube-api-access-q7bd9\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.660899 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.660925 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.660958 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.669850 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.669896 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.669912 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.669927 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.669936 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.679066 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.695300 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.696436 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:48 crc kubenswrapper[4875]: E1007 07:56:48.696574 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.714672 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca89775
0ccc09daf0f801fe6d16123a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 
7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.725456 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192
.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.737372 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.752342 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.761897 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7bd9\" (UniqueName: \"kubernetes.io/projected/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-kube-api-access-q7bd9\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.761949 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.761971 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.762003 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.762580 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.763025 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.769012 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.772443 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.773617 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.773659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.773669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 
07:56:48.773691 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.773703 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.785766 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.792469 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7bd9\" (UniqueName: \"kubernetes.io/projected/1d636971-3387-4f3c-b4a1-54a1da1e2fbe-kube-api-access-q7bd9\") pod \"ovnkube-control-plane-749d76644c-rbr4j\" (UID: \"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.801859 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.828306 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.851806 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.876298 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.876382 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.876403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.876446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.876466 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.879448 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" Oct 07 07:56:48 crc kubenswrapper[4875]: W1007 07:56:48.898710 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d636971_3387_4f3c_b4a1_54a1da1e2fbe.slice/crio-1947c3d25e1e9c77c97d91ae256a16ca6e0a47172bf2f5226a949d6307f0bc40 WatchSource:0}: Error finding container 1947c3d25e1e9c77c97d91ae256a16ca6e0a47172bf2f5226a949d6307f0bc40: Status 404 returned error can't find the container with id 1947c3d25e1e9c77c97d91ae256a16ca6e0a47172bf2f5226a949d6307f0bc40 Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.963036 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/0.log" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.967462 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.968151 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.969201 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" event={"ID":"1d636971-3387-4f3c-b4a1-54a1da1e2fbe","Type":"ContainerStarted","Data":"1947c3d25e1e9c77c97d91ae256a16ca6e0a47172bf2f5226a949d6307f0bc40"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.980470 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.980536 4875 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.980552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.980577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.980592 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:48Z","lastTransitionTime":"2025-10-07T07:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:48 crc kubenswrapper[4875]: I1007 07:56:48.987424 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:48Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.004867 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.037996 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"
name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/
log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.057670 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.079566 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 
7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.086937 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.087008 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.087027 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.087058 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.087077 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.092103 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.105920 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.122517 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.144427 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.165250 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.185831 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.190056 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.190252 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.190397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.190488 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.190548 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.202309 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.221212 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.248619 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.266424 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.282783 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.293554 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.293793 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.293856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.293935 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.293997 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.396538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.396602 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.396615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.396643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.396656 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.499538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.499580 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.499592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.499607 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.499617 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.603342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.603425 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.603446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.603487 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.603517 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.697511 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.697601 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:49 crc kubenswrapper[4875]: E1007 07:56:49.697641 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:49 crc kubenswrapper[4875]: E1007 07:56:49.698037 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.706608 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.706657 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.706671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.706688 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.706702 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.810291 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.810341 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.810353 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.810370 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.810381 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.913188 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.913250 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.913267 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.913293 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.913313 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:49Z","lastTransitionTime":"2025-10-07T07:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.976345 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/1.log" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.977412 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/0.log" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.981990 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504" exitCode=1 Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.982145 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.982740 4875 scope.go:117] "RemoveContainer" containerID="f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.989289 4875 scope.go:117] "RemoveContainer" containerID="66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504" Oct 07 07:56:49 crc kubenswrapper[4875]: E1007 07:56:49.989568 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.991126 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" event={"ID":"1d636971-3387-4f3c-b4a1-54a1da1e2fbe","Type":"ContainerStarted","Data":"12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2"} Oct 07 07:56:49 crc kubenswrapper[4875]: I1007 07:56:49.991162 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" event={"ID":"1d636971-3387-4f3c-b4a1-54a1da1e2fbe","Type":"ContainerStarted","Data":"3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.001856 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:49Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.016502 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.016559 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.016572 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.016597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.016614 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.022127 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.050380 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.071285 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.106048 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c1
10f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 
metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.119651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.119692 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.119702 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.119720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.119731 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.125128 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.141086 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.154318 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.168840 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:4
8Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.187761 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.202492 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.219969 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.222411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.222437 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.222449 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.222467 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.222482 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.236073 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.249612 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.270365 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.288829 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.305075 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.318816 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://
12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.325568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.325656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.325679 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.326219 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.326250 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.340945 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.358124 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.372053 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.388353 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.402657 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.422732 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.429497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.429536 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.429546 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.429561 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.429572 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.436850 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.457925 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.481677 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.500314 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.533550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.533624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.533648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.533687 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.533712 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.538732 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.562724 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.595271 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 
7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.612280 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.636865 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.636984 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.637010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.637040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.637063 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.697180 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:50 crc kubenswrapper[4875]: E1007 07:56:50.697362 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.740831 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.740940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.740969 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.741003 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.741026 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.787429 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-wk8rw"] Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.788592 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:50 crc kubenswrapper[4875]: E1007 07:56:50.788756 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.810523 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.844596 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.844659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.844678 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.844703 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.844725 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.847396 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.865048 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.880465 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx7vv\" (UniqueName: \"kubernetes.io/projected/dce21abc-1295-4d45-bd26-07b7e37d674c-kube-api-access-jx7vv\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 
07:56:50.880600 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.899564 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/met
rics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f27830fee340a34b4ca9b87883890143fca897750ccc09daf0f801fe6d16123a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:47Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:56:47.035259 6142 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:47.035301 6142 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 07:56:47.035336 6142 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 07:56:47.035368 6142 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:47.035374 6142 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:47.035385 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 07:56:47.035390 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 07:56:47.035428 6142 factory.go:656] Stopping watch factory\\\\nI1007 07:56:47.035440 6142 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:47.035463 6142 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:47.035473 6142 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 07:56:47.035481 6142 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1007 07:56:47.035486 6142 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:47.035492 6142 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:47.035497 6142 handler.go:208] Removed *v1.Node event handler 
7\\\\nI10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.921341 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.942493 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.948817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.948988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.949019 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.949054 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.949078 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:50Z","lastTransitionTime":"2025-10-07T07:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.962999 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.981348 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx7vv\" (UniqueName: \"kubernetes.io/projected/dce21abc-1295-4d45-bd26-07b7e37d674c-kube-api-access-jx7vv\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.981477 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:50 crc kubenswrapper[4875]: E1007 07:56:50.981768 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:50 crc kubenswrapper[4875]: E1007 07:56:50.981928 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:56:51.481848984 +0000 UTC m=+36.441619567 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:50 crc kubenswrapper[4875]: I1007 07:56:50.982183 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:50Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.000716 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/1.log" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.009000 4875 scope.go:117] "RemoveContainer" containerID="66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.009189 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.010252 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jx7vv\" (UniqueName: \"kubernetes.io/projected/dce21abc-1295-4d45-bd26-07b7e37d674c-kube-api-access-jx7vv\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.015184 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.033573 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.049168 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.052810 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.052854 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.052872 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.052923 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.052944 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.067786 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.084282 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.100903 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.115574 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.136057 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156230 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156730 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156762 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156787 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.156805 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.174457 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.195676 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.211850 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.228365 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.258724 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.259726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.259773 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.259790 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.259824 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.259843 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.282151 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.313384 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\
\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 
ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.337964 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.359231 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.363040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.363103 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.363121 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.363148 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.363167 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.378973 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.383768 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.383938 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:57:07.383911665 +0000 UTC m=+52.343682218 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.384010 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.384058 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.384088 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.384135 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384222 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384234 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384253 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384275 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384295 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:51 crc 
kubenswrapper[4875]: E1007 07:56:51.384277 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:07.384267076 +0000 UTC m=+52.344037629 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384335 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:07.384325458 +0000 UTC m=+52.344095991 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384353 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:07.384345569 +0000 UTC m=+52.344116112 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384353 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384395 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384420 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.384511 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:07.384477293 +0000 UTC m=+52.344247866 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.396387 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.416599 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.434984 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.459224 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.468442 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.468503 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.468523 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.468555 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.468574 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.474218 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.484567 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") 
pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.484761 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.484872 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:56:52.484841591 +0000 UTC m=+37.444612174 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.492600 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.507140 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.571667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.571716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.571727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.571741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.571751 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.674869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.674979 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.675020 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.675048 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.675065 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.696954 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.697158 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.697180 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.697474 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.703279 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.703327 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.703345 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.703368 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.703387 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.724200 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:56:51Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.730659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.730727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.730751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.730803 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.730841 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.756745 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 
2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.762148 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.762233 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.762263 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.762303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.762330 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.785460 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 
2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.792106 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.792171 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.792188 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.792210 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.792226 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.814101 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 
2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.819455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.819522 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.819543 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.819573 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.819596 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.842269 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:51Z is after 
2025-08-24T17:21:41Z" Oct 07 07:56:51 crc kubenswrapper[4875]: E1007 07:56:51.842493 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.844745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.844827 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.844845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.844869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.844905 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.949222 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.949303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.949326 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.949356 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:51 crc kubenswrapper[4875]: I1007 07:56:51.949375 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:51Z","lastTransitionTime":"2025-10-07T07:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.052739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.052813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.052838 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.052871 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.052948 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.156971 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.157057 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.157069 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.157090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.157104 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.260012 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.260071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.260083 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.260104 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.260120 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.362629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.362715 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.362739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.362771 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.362791 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.466275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.466360 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.466383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.466416 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.466440 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.494465 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:52 crc kubenswrapper[4875]: E1007 07:56:52.494661 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:52 crc kubenswrapper[4875]: E1007 07:56:52.494779 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:56:54.494746649 +0000 UTC m=+39.454517222 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.570099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.570192 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.570219 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.570259 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.570286 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.676101 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.676186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.676208 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.676241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.676273 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.696869 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:52 crc kubenswrapper[4875]: E1007 07:56:52.697204 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.696951 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:52 crc kubenswrapper[4875]: E1007 07:56:52.697768 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.779948 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.779999 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.780011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.780032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.780060 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.883290 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.883376 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.883397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.883428 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.883453 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.987925 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.988012 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.988032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.988063 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:52 crc kubenswrapper[4875]: I1007 07:56:52.988083 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:52Z","lastTransitionTime":"2025-10-07T07:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.091597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.091682 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.091706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.091739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.091766 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.195942 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.196004 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.196022 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.196047 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.196067 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.298913 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.298958 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.298967 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.298984 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.298994 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.401934 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.402009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.402025 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.402043 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.402056 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.504671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.504734 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.504751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.504770 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.504787 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.607991 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.608055 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.608065 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.608084 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.608096 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.697370 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.697476 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:53 crc kubenswrapper[4875]: E1007 07:56:53.697526 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:53 crc kubenswrapper[4875]: E1007 07:56:53.697632 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.711452 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.711524 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.711551 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.711586 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.711608 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.815864 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.815945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.815960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.815985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.816018 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.919658 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.919716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.919732 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.919756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:53 crc kubenswrapper[4875]: I1007 07:56:53.919769 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:53Z","lastTransitionTime":"2025-10-07T07:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.022298 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.022386 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.022404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.022438 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.022459 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.125800 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.125917 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.125939 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.125969 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.125997 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.228725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.228783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.228796 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.228820 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.228834 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.331679 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.331769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.331786 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.331818 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.331835 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.435032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.435097 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.435116 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.435142 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.435161 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.515387 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:54 crc kubenswrapper[4875]: E1007 07:56:54.515507 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:54 crc kubenswrapper[4875]: E1007 07:56:54.515557 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:56:58.515544139 +0000 UTC m=+43.475314682 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.538408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.538449 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.538458 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.538476 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.538491 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.641463 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.641508 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.641521 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.641538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.641552 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.696841 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.696845 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:54 crc kubenswrapper[4875]: E1007 07:56:54.697806 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:56:54 crc kubenswrapper[4875]: E1007 07:56:54.697998 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.745063 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.745117 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.745129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.745152 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.745165 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.848937 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.849035 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.849063 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.849099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.849124 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.954580 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.954636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.954652 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.954672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:54 crc kubenswrapper[4875]: I1007 07:56:54.954686 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:54Z","lastTransitionTime":"2025-10-07T07:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.058186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.058264 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.058277 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.058301 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.058316 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.162047 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.162135 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.162154 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.162188 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.162207 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.265311 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.265397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.265418 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.265450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.265478 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.369568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.369697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.369724 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.369764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.369796 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.473405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.473486 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.473508 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.473531 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.473554 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.576109 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.576169 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.576192 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.576220 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.576242 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.678304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.678677 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.678830 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.679021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.679220 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.697334 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.697416 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:55 crc kubenswrapper[4875]: E1007 07:56:55.697509 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:55 crc kubenswrapper[4875]: E1007 07:56:55.697816 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.719628 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.739723 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.755692 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 
07:56:55.776741 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.782039 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.782106 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.782124 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.782146 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.782158 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.815486 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.836384 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.867984 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.889521 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.889571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.889583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.889601 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.889613 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.892945 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.917621 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.938259 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.956361 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.973604 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-r
esources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.990225 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.992191 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.992361 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.992390 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.992512 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:55 crc kubenswrapper[4875]: I1007 07:56:55.992539 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:55Z","lastTransitionTime":"2025-10-07T07:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.008678 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.025113 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.045551 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.061348 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.095279 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.095344 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.095362 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.095394 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.095412 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.198645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.198743 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.198767 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.198795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.198814 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.301820 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.301934 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.301959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.301987 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.302007 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.405244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.405699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.405869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.406065 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.406297 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.509122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.509165 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.509177 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.509195 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.509211 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.611899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.611932 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.611940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.611953 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.611961 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.697535 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.697532 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:56 crc kubenswrapper[4875]: E1007 07:56:56.697723 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:56 crc kubenswrapper[4875]: E1007 07:56:56.697867 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.713917 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.713968 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.713986 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.714009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.714027 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.816521 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.816565 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.816583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.816606 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.816623 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.919801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.919854 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.919866 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.919902 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:56 crc kubenswrapper[4875]: I1007 07:56:56.919913 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:56Z","lastTransitionTime":"2025-10-07T07:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.022904 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.022972 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.022985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.023009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.023028 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.126145 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.126238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.126263 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.126304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.126330 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.230274 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.230360 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.230388 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.230422 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.230447 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.334334 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.334411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.334434 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.334465 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.334487 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.437399 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.437473 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.437492 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.437516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.437535 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.541103 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.541149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.541162 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.541179 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.541192 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.644291 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.644377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.644398 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.644431 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.644455 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.697127 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.697183 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:57 crc kubenswrapper[4875]: E1007 07:56:57.697398 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:57 crc kubenswrapper[4875]: E1007 07:56:57.697603 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.747647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.747758 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.747783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.747817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.747843 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.851615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.851665 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.851673 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.851687 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.851698 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.954643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.954696 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.954712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.954735 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:57 crc kubenswrapper[4875]: I1007 07:56:57.954753 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:57Z","lastTransitionTime":"2025-10-07T07:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.057849 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.057946 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.057970 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.057999 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.058021 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.160507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.160542 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.160553 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.160569 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.160579 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.263094 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.263147 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.263162 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.263184 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.263201 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.365688 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.365745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.365766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.365793 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.365815 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.468959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.469030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.469048 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.469074 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.469095 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.565525 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:58 crc kubenswrapper[4875]: E1007 07:56:58.565848 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:58 crc kubenswrapper[4875]: E1007 07:56:58.566081 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:57:06.56604879 +0000 UTC m=+51.525819373 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.572753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.572790 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.572798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.572811 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.572819 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.675345 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.675418 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.675442 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.675471 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.675493 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.696441 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.696581 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:56:58 crc kubenswrapper[4875]: E1007 07:56:58.696694 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:56:58 crc kubenswrapper[4875]: E1007 07:56:58.697320 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.698110 4875 scope.go:117] "RemoveContainer" containerID="671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.778463 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.778519 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.778536 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.778558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.778578 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.880444 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.880479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.880487 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.880499 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.880509 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.983454 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.983507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.983518 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.983540 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:58 crc kubenswrapper[4875]: I1007 07:56:58.983553 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:58Z","lastTransitionTime":"2025-10-07T07:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.041952 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.044395 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.044704 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.060375 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.080236 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.086389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.086440 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.086453 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.086474 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.086487 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.095188 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.114912 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\
\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 
ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.126661 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.137546 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.148929 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.166326 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.182118 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.189205 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.189257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.189279 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.189309 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.189330 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.198586 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.215374 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.237821 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.255896 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc 
kubenswrapper[4875]: I1007 07:56:59.278488 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.292790 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.292850 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.292867 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.292915 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.292933 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.294409 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.313626 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.335218 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:56:59Z is after 2025-08-24T17:21:41Z" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.395662 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.395725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc 
kubenswrapper[4875]: I1007 07:56:59.395746 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.395773 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.395788 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.498716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.498769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.498779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.498796 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.498806 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.602344 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.602431 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.602452 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.602478 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.602498 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.697671 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.697702 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:56:59 crc kubenswrapper[4875]: E1007 07:56:59.697981 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:56:59 crc kubenswrapper[4875]: E1007 07:56:59.698075 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.705750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.705817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.705845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.705947 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.705976 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.810488 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.810583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.810611 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.810644 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.810665 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.914305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.914401 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.914426 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.914459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:56:59 crc kubenswrapper[4875]: I1007 07:56:59.914478 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:56:59Z","lastTransitionTime":"2025-10-07T07:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.018210 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.018292 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.018312 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.018342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.018361 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.122174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.122259 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.122280 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.122334 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.122358 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.225998 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.226079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.226099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.226129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.226150 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.329496 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.329544 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.329557 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.329575 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.329610 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.433289 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.433365 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.433384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.433411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.433429 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.537088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.537182 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.537208 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.537241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.537266 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.642330 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.642384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.642398 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.642419 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.642431 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.696797 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:00 crc kubenswrapper[4875]: E1007 07:57:00.696968 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.697380 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:00 crc kubenswrapper[4875]: E1007 07:57:00.697683 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.746375 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.746442 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.746455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.746472 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.746502 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.850224 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.850357 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.850383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.850448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.850471 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.956550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.956655 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.956673 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.957395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:00 crc kubenswrapper[4875]: I1007 07:57:00.957473 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:00Z","lastTransitionTime":"2025-10-07T07:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.061408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.061501 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.061666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.061722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.061742 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.165751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.165804 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.165814 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.165837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.165849 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.269574 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.269644 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.269667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.269697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.269721 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.372535 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.372584 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.372597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.372619 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.372634 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.475337 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.475677 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.475746 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.475861 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.475954 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.578744 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.578814 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.578828 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.578849 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.578862 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.682456 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.682543 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.682564 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.682594 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.682615 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.696844 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.697239 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:01 crc kubenswrapper[4875]: E1007 07:57:01.697389 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:01 crc kubenswrapper[4875]: E1007 07:57:01.698198 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.785944 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.786046 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.786064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.786091 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.786111 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.890322 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.890411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.890430 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.890461 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.890483 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.996700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.996761 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.996777 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.996801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:01 crc kubenswrapper[4875]: I1007 07:57:01.996822 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:01Z","lastTransitionTime":"2025-10-07T07:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.100797 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.100960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.100984 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.101408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.101664 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.195192 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.195244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.195256 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.195275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.195288 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.214725 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:02Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.219196 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.219233 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.219246 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.219276 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.219302 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.236101 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:02Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.245093 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.245142 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.245154 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.245172 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.245200 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.262848 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:02Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.267446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.267600 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.267693 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.267780 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.267856 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.287068 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:02Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.292501 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.292566 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.292581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.292605 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.292622 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.307744 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:02Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.307930 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.310001 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.310051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.310065 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.310090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.310104 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.413410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.413499 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.413525 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.413551 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.413567 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.520815 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.520913 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.520933 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.520960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.520979 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.624639 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.624706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.624720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.624940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.624952 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.697369 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.697463 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.697596 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:02 crc kubenswrapper[4875]: E1007 07:57:02.697766 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.727507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.727543 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.727552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.727576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.727586 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.830482 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.830513 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.830521 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.830533 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.830541 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.933405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.933473 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.933501 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.933532 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:02 crc kubenswrapper[4875]: I1007 07:57:02.933553 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:02Z","lastTransitionTime":"2025-10-07T07:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.036170 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.036206 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.036215 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.036228 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.036236 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.139575 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.139647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.139669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.139698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.139719 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.242233 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.242287 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.242300 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.242320 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.242333 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.345915 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.345993 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.346012 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.346043 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.346061 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.449046 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.449159 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.449186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.449216 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.449239 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.552643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.552722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.552746 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.552781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.552803 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.656486 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.656535 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.656549 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.656571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.656583 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.696720 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.696858 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:03 crc kubenswrapper[4875]: E1007 07:57:03.696996 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:03 crc kubenswrapper[4875]: E1007 07:57:03.697085 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.759358 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.759395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.759404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.759419 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.759428 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.861775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.861813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.861823 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.861836 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.861846 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.963991 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.964047 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.964062 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.964077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:03 crc kubenswrapper[4875]: I1007 07:57:03.964087 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:03Z","lastTransitionTime":"2025-10-07T07:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.066289 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.066327 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.066335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.066347 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.066356 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.169336 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.169376 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.169387 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.169406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.169417 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.272950 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.273064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.273090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.273122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.273147 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.375712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.375770 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.375789 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.375813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.375830 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.479298 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.479380 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.479400 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.479432 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.479456 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.504078 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.514793 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.527031 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.547162 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.567508 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be4
02e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.583357 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.583523 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.583725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.583776 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.583800 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.598092 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.613143 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.635251 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.649151 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.663691 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\
\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.678088 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256
:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.686549 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.686616 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.686640 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.686673 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.686698 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.695394 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.696512 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:04 crc kubenswrapper[4875]: E1007 07:57:04.696670 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.696521 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:04 crc kubenswrapper[4875]: E1007 07:57:04.696837 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.712525 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.725709 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.742165 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.757212 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.770359 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.784941 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.788914 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.788957 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.788972 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.788992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.789006 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.798368 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:04Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.891996 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.892062 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.892078 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.892104 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.892122 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.995667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.995736 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.995759 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.995789 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:04 crc kubenswrapper[4875]: I1007 07:57:04.995806 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:04Z","lastTransitionTime":"2025-10-07T07:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.098445 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.098477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.098491 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.098509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.098522 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.201411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.201478 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.201496 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.201524 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.201544 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.304783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.304837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.304853 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.304911 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.304932 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.408277 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.408330 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.408346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.408369 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.408385 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.511248 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.511310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.511328 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.511352 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.511371 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.614471 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.614533 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.614550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.614576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.614595 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.696773 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.696934 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:05 crc kubenswrapper[4875]: E1007 07:57:05.697012 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:05 crc kubenswrapper[4875]: E1007 07:57:05.697284 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.718509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.718604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.718631 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.718663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.718687 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.725072 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.743694 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.765194 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.787917 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.815456 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.822224 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.822279 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.822297 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.822321 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.822339 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.836852 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.870187 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.892125 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.925487 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.925558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.925584 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.925619 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.925647 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:05Z","lastTransitionTime":"2025-10-07T07:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.927603 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.944410 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.964276 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:05 crc kubenswrapper[4875]: I1007 07:57:05.988308 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\
\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:05Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.010869 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.025256 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.028212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.028250 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.028262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.028280 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.028291 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.042140 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.054192 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.069851 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.084417 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:06Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:06 crc 
kubenswrapper[4875]: I1007 07:57:06.131993 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.132056 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.132071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.132107 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.132122 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.236059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.236577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.236741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.237001 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.237141 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.341392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.341455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.341474 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.341498 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.341515 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.444653 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.444714 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.444731 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.444755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.444773 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.547533 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.551722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.551795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.551835 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.551867 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.655636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.655688 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.655711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.655728 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.655741 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.666593 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:06 crc kubenswrapper[4875]: E1007 07:57:06.666770 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:06 crc kubenswrapper[4875]: E1007 07:57:06.666845 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:57:22.666814955 +0000 UTC m=+67.626585498 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.696862 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.696939 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:06 crc kubenswrapper[4875]: E1007 07:57:06.697011 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:06 crc kubenswrapper[4875]: E1007 07:57:06.697075 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.698434 4875 scope.go:117] "RemoveContainer" containerID="66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.758493 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.758536 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.758545 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.758558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.758568 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.861112 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.861141 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.861149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.861166 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.861177 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.963745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.963798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.963816 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.963838 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:06 crc kubenswrapper[4875]: I1007 07:57:06.963855 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:06Z","lastTransitionTime":"2025-10-07T07:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.066241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.066284 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.066301 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.066322 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.066337 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.081427 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/1.log" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.084238 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.084779 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.097616 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6
004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.120347 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.134119 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.166231 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5
d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.168178 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.168206 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.168217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.168234 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.168244 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.185856 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.207990 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.220724 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.233770 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.244739 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.258530 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.270525 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.270567 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.270581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.270600 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.270613 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.273056 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.286833 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.297994 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc 
kubenswrapper[4875]: I1007 07:57:07.312038 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.323788 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.337815 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.352643 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.367681 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.373480 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.373511 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.373520 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.373534 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.373543 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.474494 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.474771 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:57:39.474731338 +0000 UTC m=+84.434501911 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.475386 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475520 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475547 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475558 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475597 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:39.475582864 +0000 UTC m=+84.435353407 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475707 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475736 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475749 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.475798 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:39.475783361 +0000 UTC m=+84.435553904 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.475898 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.475939 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.475965 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.475995 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc 
kubenswrapper[4875]: E1007 07:57:07.476033 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.476085 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.476121 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:39.476105451 +0000 UTC m=+84.435876014 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.476141 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:57:39.476131702 +0000 UTC m=+84.435902255 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.476064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.476176 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.476195 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.476208 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.578612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.578651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.578660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.578743 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.578754 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.681369 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.681738 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.681747 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.681762 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.681771 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.696654 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.696813 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.697146 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:07 crc kubenswrapper[4875]: E1007 07:57:07.697518 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.784584 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.785129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.785289 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.785426 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.785616 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.890933 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.891263 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.891363 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.891437 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.891499 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.995338 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.995405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.995423 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.995447 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:07 crc kubenswrapper[4875]: I1007 07:57:07.995466 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:07Z","lastTransitionTime":"2025-10-07T07:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.091424 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/2.log" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.092206 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/1.log" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.095832 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" exitCode=1 Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.095868 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.095964 4875 scope.go:117] "RemoveContainer" containerID="66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.096621 4875 scope.go:117] "RemoveContainer" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" Oct 07 07:57:08 crc kubenswrapper[4875]: E1007 07:57:08.096787 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.098676 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.098712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.098721 4875 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.098735 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.098744 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.114661 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.142368 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.168789 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.190425 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.201612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.201966 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.202071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.202180 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: 
I1007 07:57:08.202279 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.211766 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b2
6702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.224365 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.254773 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://66cb029e42151e4a186166d27962b933e0e0e9c110f07ffe602c276fb7f8f504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"message\\\":\\\"ReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 07:56:48.894892 6286 egressqos.go:1009] Finished syncing EgressQoS node crc : 744.984µs\\\\nI1007 07:56:48.895270 6286 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 07:56:48.895720 6286 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 07:56:48.895756 6286 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 07:56:48.895762 6286 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 07:56:48.895780 6286 factory.go:656] Stopping watch factory\\\\nI1007 07:56:48.895802 6286 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 07:56:48.895817 6286 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1007 07:56:48.895825 6286 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:56:48.897786 6286 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:56:48.897999 6286 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:56:48.898118 6286 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network 
policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.271347 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.298310 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.304813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.304848 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.304858 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.304874 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.304897 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.317938 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.335823 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.351279 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.371905 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.383208 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.394767 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.404824 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.407275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.407417 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.407522 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.407614 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.407696 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.420809 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.434496 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:08Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.510153 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.510209 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.510227 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.510251 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.510267 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.613253 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.613311 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.613328 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.613358 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.613376 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.697104 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.697180 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:08 crc kubenswrapper[4875]: E1007 07:57:08.697294 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:08 crc kubenswrapper[4875]: E1007 07:57:08.697494 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.716707 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.716750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.716759 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.716777 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.716790 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.819663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.819712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.819723 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.819743 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.819757 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.922831 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.922967 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.922986 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.923005 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:08 crc kubenswrapper[4875]: I1007 07:57:08.923019 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:08Z","lastTransitionTime":"2025-10-07T07:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.028854 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.028993 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.029010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.029056 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.029078 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.103823 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/2.log" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.107959 4875 scope.go:117] "RemoveContainer" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" Oct 07 07:57:09 crc kubenswrapper[4875]: E1007 07:57:09.108139 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.132351 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.132430 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.132451 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.132485 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.132508 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.136398 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.157994 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.176350 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.197256 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.219733 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.236473 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.236527 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.236543 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.236568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.236587 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.244539 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.267718 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.299691 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.320590 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.339303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.339351 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.339363 4875 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.339382 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.339395 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.347074 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5
d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.366032 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.382757 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.398969 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.415162 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.437052 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.442354 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.442409 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.442424 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.442446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.442460 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.454837 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.473097 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.485574 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:09Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.545592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.545672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.545694 4875 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.545726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.545747 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.649138 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.649197 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.649215 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.649240 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.649258 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.696960 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.696990 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:09 crc kubenswrapper[4875]: E1007 07:57:09.697149 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:09 crc kubenswrapper[4875]: E1007 07:57:09.697269 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.752757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.753278 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.753495 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.753668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.754203 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.857874 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.857983 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.858011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.858045 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.858071 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.961497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.961579 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.961603 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.961633 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:09 crc kubenswrapper[4875]: I1007 07:57:09.961655 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:09Z","lastTransitionTime":"2025-10-07T07:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.064597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.064639 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.064651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.064668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.064678 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.166948 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.167019 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.167031 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.167056 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.167075 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.276453 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.276492 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.276503 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.276519 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.276532 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.379392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.379476 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.379503 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.379541 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.379562 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.482191 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.482286 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.482305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.482337 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.482357 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.585912 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.585955 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.585966 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.585981 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.585991 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.689249 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.689316 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.689335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.689364 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.689381 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.696720 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.696856 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:10 crc kubenswrapper[4875]: E1007 07:57:10.697275 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:10 crc kubenswrapper[4875]: E1007 07:57:10.697445 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.791663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.791745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.791780 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.791812 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.791834 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.894367 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.894460 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.894487 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.894519 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.894542 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.998079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.998199 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.998220 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.998244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:10 crc kubenswrapper[4875]: I1007 07:57:10.998268 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:10Z","lastTransitionTime":"2025-10-07T07:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.100545 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.100621 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.100643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.100670 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.100690 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.203718 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.203779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.203797 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.203821 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.203840 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.306609 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.306699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.306775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.306805 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.306828 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.409823 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.409865 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.409894 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.409910 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.409922 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.512801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.512869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.513418 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.513452 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.513475 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.616548 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.616637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.616657 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.616680 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.616703 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.696820 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:11 crc kubenswrapper[4875]: E1007 07:57:11.696999 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.697099 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:11 crc kubenswrapper[4875]: E1007 07:57:11.697274 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.719678 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.719712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.719722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.719756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.719768 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.823110 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.823194 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.823218 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.823247 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.823268 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.926387 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.926424 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.926434 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.926450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:11 crc kubenswrapper[4875]: I1007 07:57:11.926460 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:11Z","lastTransitionTime":"2025-10-07T07:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.029634 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.029689 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.029704 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.029724 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.029740 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.131644 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.131699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.131711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.131727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.131740 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.234745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.234813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.234844 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.234921 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.234944 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.338052 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.338110 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.338127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.338153 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.338171 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.441449 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.441530 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.441552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.441579 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.441598 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.527734 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.527805 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.527822 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.527847 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.527865 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.549203 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:12Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.553851 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.553985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.554055 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.554118 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.554192 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.572516 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:12Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.577534 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.577575 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.577587 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.577604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.577615 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.594630 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:12Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.600171 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.600247 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.600269 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.600296 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.600313 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.621186 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:12Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.625591 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.625794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.625909 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.626025 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.626124 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.640785 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:12Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.641012 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.642960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.643026 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.643052 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.643071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.643086 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.697595 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.697929 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.697645 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:12 crc kubenswrapper[4875]: E1007 07:57:12.698659 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.745772 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.745837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.745848 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.745872 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.745908 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.848971 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.849041 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.849064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.849097 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.849122 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.952190 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.952238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.952255 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.952277 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:12 crc kubenswrapper[4875]: I1007 07:57:12.952294 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:12Z","lastTransitionTime":"2025-10-07T07:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.055031 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.055087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.055099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.055117 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.055130 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.157410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.157477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.157501 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.157528 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.157552 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.261195 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.261267 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.261287 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.261310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.261328 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.364133 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.364197 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.364216 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.364243 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.364260 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.468007 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.468084 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.468102 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.468127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.468144 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.570999 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.571074 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.571089 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.571109 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.571121 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.674029 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.674091 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.674119 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.674184 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.674208 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.697195 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.697215 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:13 crc kubenswrapper[4875]: E1007 07:57:13.697410 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:13 crc kubenswrapper[4875]: E1007 07:57:13.697520 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.776664 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.776767 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.776794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.776837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.776859 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.879778 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.879835 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.879860 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.879950 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.879975 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.982671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.982702 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.982726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.982741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:13 crc kubenswrapper[4875]: I1007 07:57:13.982751 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:13Z","lastTransitionTime":"2025-10-07T07:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.085933 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.085972 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.085986 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.086004 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.086016 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.189132 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.189174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.189186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.189201 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.189212 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.292182 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.292234 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.292249 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.292272 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.292287 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.395278 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.395356 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.395395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.395426 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.395455 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.498215 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.498275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.498296 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.498322 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.498341 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.601988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.602076 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.602103 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.602136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.602157 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.697373 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.697592 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:14 crc kubenswrapper[4875]: E1007 07:57:14.697773 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:14 crc kubenswrapper[4875]: E1007 07:57:14.697989 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.706349 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.706446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.706470 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.706497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.706516 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.809190 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.809244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.809255 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.809272 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.809283 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.912139 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.912212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.912233 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.912260 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:14 crc kubenswrapper[4875]: I1007 07:57:14.912281 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:14Z","lastTransitionTime":"2025-10-07T07:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.015576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.015700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.015722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.015750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.015768 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.119405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.119458 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.119474 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.119498 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.119513 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.221966 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.222064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.222083 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.222113 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.222132 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.325600 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.325660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.325678 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.325704 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.325722 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.428963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.429082 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.429109 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.429146 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.429173 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.532146 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.532202 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.532219 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.532257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.532274 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.618807 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.635041 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.635083 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.635094 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.635110 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.635121 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.639577 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.655706 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.671640 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.689709 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.696624 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.696662 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:15 crc kubenswrapper[4875]: E1007 07:57:15.696744 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:15 crc kubenswrapper[4875]: E1007 07:57:15.696826 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.711275 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.731734 
4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.736988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.737033 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.737049 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.737070 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.737086 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.755615 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.769975 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.809096 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.821584 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.834336 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.839197 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.839246 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.839257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.839275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.839287 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.849787 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.863670 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.876718 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.888191 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.900971 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.911367 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.921068 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.932355 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.942317 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.942360 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.942372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.942388 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.942402 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:15Z","lastTransitionTime":"2025-10-07T07:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.944439 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.955274 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.965614 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/
kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.978128 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:15 crc kubenswrapper[4875]: I1007 07:57:15.988282 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.000929 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:15Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.009581 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.022256 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.032726 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.043935 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.044182 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.044226 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.044241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.044260 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.044274 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.055697 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.068777 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.077489 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.086603 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.103369 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.117248 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.138457 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:16Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.145621 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.145647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.145656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.145668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.145677 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.247859 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.247964 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.247998 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.248028 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.248050 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.353752 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.353787 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.353797 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.353811 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.353819 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.456656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.456694 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.456703 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.456719 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.456728 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.560942 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.560989 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.561004 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.561027 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.561043 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.663672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.663740 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.663763 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.663794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.663817 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.697009 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.697091 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:16 crc kubenswrapper[4875]: E1007 07:57:16.697165 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:16 crc kubenswrapper[4875]: E1007 07:57:16.697226 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.766099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.766172 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.766191 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.766217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.766235 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.869955 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.870021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.870038 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.870065 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.870088 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.973338 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.973389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.973424 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.973447 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:16 crc kubenswrapper[4875]: I1007 07:57:16.973461 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:16Z","lastTransitionTime":"2025-10-07T07:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.076305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.076364 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.076381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.076405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.076425 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.178192 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.178242 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.178251 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.178269 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.178280 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.280646 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.280690 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.280707 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.280727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.280741 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.383628 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.383701 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.383725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.383754 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.383776 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.487297 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.487390 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.487399 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.487416 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.487426 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.590456 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.590517 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.590531 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.590550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.590561 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.693235 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.693285 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.693301 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.693319 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.693328 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.696755 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.696801 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:17 crc kubenswrapper[4875]: E1007 07:57:17.696849 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:17 crc kubenswrapper[4875]: E1007 07:57:17.696958 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.795844 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.795953 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.795980 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.796011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.796047 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.898839 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.898906 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.898920 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.898938 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:17 crc kubenswrapper[4875]: I1007 07:57:17.898952 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:17Z","lastTransitionTime":"2025-10-07T07:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.001753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.001806 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.001819 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.001851 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.001866 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.105104 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.105148 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.105160 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.105174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.105184 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.208031 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.208095 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.208112 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.208134 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.208178 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.310681 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.310753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.310763 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.310781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.310794 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.413570 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.413629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.413647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.413669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.413685 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.516105 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.516140 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.516149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.516161 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.516169 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.618567 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.618605 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.618615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.618627 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.618636 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.696526 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:18 crc kubenswrapper[4875]: E1007 07:57:18.696650 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.696721 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:18 crc kubenswrapper[4875]: E1007 07:57:18.696856 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.721230 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.721310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.721335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.721369 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.721389 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.824540 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.824605 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.824616 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.824629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.824637 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.926820 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.926851 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.926859 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.926873 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:18 crc kubenswrapper[4875]: I1007 07:57:18.926894 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:18Z","lastTransitionTime":"2025-10-07T07:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.029178 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.029263 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.029336 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.029384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.029401 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.132621 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.132669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.132682 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.132702 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.132716 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.235331 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.235407 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.235417 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.235432 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.235443 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.338144 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.338197 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.338209 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.338226 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.338237 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.440663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.440725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.440736 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.440753 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.440785 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.543149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.543187 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.543199 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.543213 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.543223 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.645455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.645571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.645580 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.645594 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.645603 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.696863 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:19 crc kubenswrapper[4875]: E1007 07:57:19.697999 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.698276 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:19 crc kubenswrapper[4875]: E1007 07:57:19.698332 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.748114 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.748149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.748157 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.748172 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.748180 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.850462 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.850494 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.850502 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.850517 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.850526 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.954603 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.954649 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.954659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.954680 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:19 crc kubenswrapper[4875]: I1007 07:57:19.954690 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:19Z","lastTransitionTime":"2025-10-07T07:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.057915 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.057954 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.057963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.057979 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.057990 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.160945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.160985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.160994 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.161009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.161018 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.263393 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.263441 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.263455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.263473 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.263488 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.365283 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.365318 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.365330 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.365392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.365406 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.467346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.467381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.467392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.467409 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.467421 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.569640 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.569681 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.569692 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.569707 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.569718 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.672256 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.672291 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.672300 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.672341 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.672369 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.696751 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:20 crc kubenswrapper[4875]: E1007 07:57:20.696866 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.696751 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:20 crc kubenswrapper[4875]: E1007 07:57:20.696984 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.774805 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.774849 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.774857 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.774872 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.774899 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.877699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.877744 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.877782 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.877801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.877819 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.979959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.980000 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.980009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.980025 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:20 crc kubenswrapper[4875]: I1007 07:57:20.980037 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:20Z","lastTransitionTime":"2025-10-07T07:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.083259 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.083303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.083315 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.083331 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.083340 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.185217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.185306 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.185334 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.185378 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.185406 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.288513 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.288571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.288581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.288602 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.288619 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.391700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.391747 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.391760 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.391781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.391793 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.495049 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.495122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.495144 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.495179 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.495201 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.598303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.598345 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.598355 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.598371 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.598384 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.696852 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.696918 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:21 crc kubenswrapper[4875]: E1007 07:57:21.697049 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:21 crc kubenswrapper[4875]: E1007 07:57:21.697108 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.700651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.700684 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.700695 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.700710 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.700721 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.803851 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.803906 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.803915 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.803929 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.803938 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.907003 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.907040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.907050 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.907067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:21 crc kubenswrapper[4875]: I1007 07:57:21.907079 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:21Z","lastTransitionTime":"2025-10-07T07:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.009230 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.009271 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.009282 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.009297 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.009309 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.111620 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.111651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.111662 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.111676 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.111685 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.214701 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.214775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.214794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.214823 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.214844 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.318666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.318743 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.318759 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.318789 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.318805 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.422308 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.422366 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.422378 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.422402 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.422414 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.525698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.525727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.525736 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.525750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.525758 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.628973 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.629030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.629040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.629058 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.629070 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.696978 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.697092 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.697370 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.697543 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.708515 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.731751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.731800 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.731815 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.731835 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.731846 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.743347 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.743522 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.743595 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:57:54.743572238 +0000 UTC m=+99.703342851 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.833633 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.833668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.833676 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.833690 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.833699 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.878354 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.878410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.878421 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.878437 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.878448 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.890191 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:22Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.892982 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.893021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.893033 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.893051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.893062 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.909050 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:22Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.912227 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.912336 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.912427 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.912509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.912600 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.924497 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:22Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.928599 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.928634 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.928646 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.928659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.928671 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.938345 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:22Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.941527 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.941598 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.941617 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.941648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.941667 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.952925 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:22Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:22 crc kubenswrapper[4875]: E1007 07:57:22.953045 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.954907 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.954932 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.954941 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.954960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:22 crc kubenswrapper[4875]: I1007 07:57:22.954972 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:22Z","lastTransitionTime":"2025-10-07T07:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.057601 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.057635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.057643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.057657 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.057668 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.160262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.160298 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.160307 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.160320 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.160329 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.262597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.262661 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.262671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.262686 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.262696 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.365229 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.365273 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.365283 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.365299 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.365310 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.468687 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.468720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.468728 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.468747 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.468757 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.571185 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.571234 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.571246 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.571262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.571273 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.673595 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.673654 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.673666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.673686 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.673701 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.697426 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.697480 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:23 crc kubenswrapper[4875]: E1007 07:57:23.697730 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:23 crc kubenswrapper[4875]: E1007 07:57:23.697908 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.698780 4875 scope.go:117] "RemoveContainer" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" Oct 07 07:57:23 crc kubenswrapper[4875]: E1007 07:57:23.699052 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.776495 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.776556 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.776568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.776588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.776605 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.879030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.879093 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.879105 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.879120 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.879150 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.981710 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.981751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.981760 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.981776 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:23 crc kubenswrapper[4875]: I1007 07:57:23.981785 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:23Z","lastTransitionTime":"2025-10-07T07:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.084246 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.084294 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.084306 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.084325 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.084335 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.186033 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.186067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.186075 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.186092 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.186101 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.288601 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.288645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.288656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.288675 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.288687 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.390371 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.390406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.390415 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.390430 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.390440 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.492629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.492667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.492679 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.492694 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.492731 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.595432 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.595480 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.595492 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.595508 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.595520 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.697093 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.697162 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:24 crc kubenswrapper[4875]: E1007 07:57:24.697229 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:24 crc kubenswrapper[4875]: E1007 07:57:24.697425 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.698622 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.698647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.698658 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.698672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.698683 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.800651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.800702 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.800711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.800726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.800736 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.903045 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.903087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.903098 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.903115 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:24 crc kubenswrapper[4875]: I1007 07:57:24.903125 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:24Z","lastTransitionTime":"2025-10-07T07:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.005419 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.005463 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.005474 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.005491 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.005502 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.107439 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.107526 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.107537 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.107552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.107562 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.157633 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/0.log" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.157706 4875 generic.go:334] "Generic (PLEG): container finished" podID="e5a790e1-c591-4cfc-930f-4805a923790b" containerID="0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa" exitCode=1 Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.157749 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerDied","Data":"0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.158275 4875 scope.go:117] "RemoveContainer" containerID="0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.171685 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.182297 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.195105 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f57
8aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.205223 4875 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\
\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.209597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.209890 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.209901 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.209915 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.209925 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.217581 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.227927 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.243899 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.253377 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.265856 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.283485 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.299683 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.312380 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.312437 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.312451 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.312471 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.312482 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.317204 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.327903 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.340955 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.352908 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.365987 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.378289 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.388054 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.399918 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.414637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.414850 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.414937 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.415041 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.415113 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.517404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.517440 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.517448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.517464 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.517475 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.620407 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.620450 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.620461 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.620477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.620485 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.697055 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:25 crc kubenswrapper[4875]: E1007 07:57:25.697191 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.697433 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:25 crc kubenswrapper[4875]: E1007 07:57:25.697725 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.714410 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.723040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.723336 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.723488 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.723595 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.723677 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.731306 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.748957 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.766977 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.780845 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b
00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.800063 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.814059 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.826439 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.826493 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.826506 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.826524 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.826536 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.836658 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.849490 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.861155 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.873350 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.885725 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.896981 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.910590 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.923166 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.928559 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.928604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.928617 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.928637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.928650 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:25Z","lastTransitionTime":"2025-10-07T07:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.936068 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.950368 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 
07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.964940 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:25 crc kubenswrapper[4875]: I1007 07:57:25.976770 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:25Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.031643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.032130 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.032264 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.032366 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.032465 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.135940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.135987 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.135998 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.136017 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.136029 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.162212 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/0.log" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.162282 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerStarted","Data":"512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.177764 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.199234 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, 
Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-
mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.211647 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.225238 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.238218 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.238263 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.238277 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.238299 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.238318 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.245922 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.259597 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.273274 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.285044 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.296925 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.310685 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.326504 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.339138 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.340342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.340372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.340381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.340396 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.340407 4875 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.349431 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.359831 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 
07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.371816 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.383063 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.396460 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.407213 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.420861 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:26Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.442656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.442699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.442716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.442735 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.442747 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.546087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.546147 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.546160 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.546181 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.546192 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.648958 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.649012 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.649029 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.649053 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.649066 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.696764 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.696911 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:26 crc kubenswrapper[4875]: E1007 07:57:26.696935 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:26 crc kubenswrapper[4875]: E1007 07:57:26.697166 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.752595 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.752669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.752687 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.752716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.752735 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.855466 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.855675 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.855742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.855856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.855965 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.961724 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.961757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.961766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.961780 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:26 crc kubenswrapper[4875]: I1007 07:57:26.961792 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:26Z","lastTransitionTime":"2025-10-07T07:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.064212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.064238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.064257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.064275 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.064284 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.165699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.165744 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.165755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.165772 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.165781 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.267717 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.267755 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.267764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.267823 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.267835 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.369954 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.369988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.369997 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.370018 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.370028 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.472267 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.472307 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.472318 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.472332 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.472341 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.574452 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.574498 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.574509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.574527 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.574538 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.677447 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.677484 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.677499 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.677515 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.677529 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.697233 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:27 crc kubenswrapper[4875]: E1007 07:57:27.697373 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.697426 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:27 crc kubenswrapper[4875]: E1007 07:57:27.697527 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.780251 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.780282 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.780292 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.780307 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.780317 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.882726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.882771 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.882781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.882799 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.882811 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.985077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.985125 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.985136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.985156 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:27 crc kubenswrapper[4875]: I1007 07:57:27.985166 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:27Z","lastTransitionTime":"2025-10-07T07:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.087680 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.087761 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.087779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.087810 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.087827 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.190677 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.190752 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.190762 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.190781 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.190793 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.293701 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.293759 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.293769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.293786 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.293796 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.395896 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.395940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.395949 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.395988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.396004 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.497727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.497775 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.497792 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.497811 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.497822 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.600002 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.600043 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.600055 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.600073 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.600083 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.696697 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:28 crc kubenswrapper[4875]: E1007 07:57:28.696828 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.696982 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:28 crc kubenswrapper[4875]: E1007 07:57:28.697145 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.702945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.702997 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.703014 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.703037 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.703123 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.805741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.805813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.805836 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.805867 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.805928 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.909267 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.909359 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.909385 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.909417 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:28 crc kubenswrapper[4875]: I1007 07:57:28.909442 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:28Z","lastTransitionTime":"2025-10-07T07:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.012448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.012517 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.012527 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.012547 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.012561 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.114705 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.114764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.114774 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.114795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.114808 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.217024 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.217081 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.217091 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.217119 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.217133 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.319556 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.319588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.319597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.319610 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.319619 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.421306 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.421354 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.421365 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.421379 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.421387 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.523588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.523628 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.523643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.523671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.523690 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.626304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.626343 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.626352 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.626368 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.626377 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.697117 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.697191 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:29 crc kubenswrapper[4875]: E1007 07:57:29.697299 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:29 crc kubenswrapper[4875]: E1007 07:57:29.697391 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.728407 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.728457 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.728469 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.728486 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.728498 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.831168 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.831237 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.831261 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.831290 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.831311 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.933239 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.933281 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.933291 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.933309 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:29 crc kubenswrapper[4875]: I1007 07:57:29.933322 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:29Z","lastTransitionTime":"2025-10-07T07:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.035304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.035352 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.035364 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.035382 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.035392 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.136985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.137064 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.137077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.137093 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.137104 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.239629 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.239663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.239671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.239684 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.239694 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.341859 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.341910 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.341922 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.341937 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.341946 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.444738 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.444777 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.444785 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.444801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.444810 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.547436 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.547482 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.547491 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.547507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.547519 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.649946 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.650011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.650026 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.650042 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.650069 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.696418 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.696469 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:30 crc kubenswrapper[4875]: E1007 07:57:30.696549 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:30 crc kubenswrapper[4875]: E1007 07:57:30.696685 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.752622 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.752660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.752671 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.752685 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.752693 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.854569 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.854612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.854625 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.854638 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.854647 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.956855 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.956903 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.956913 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.956927 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:30 crc kubenswrapper[4875]: I1007 07:57:30.956936 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:30Z","lastTransitionTime":"2025-10-07T07:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.059572 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.059615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.059626 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.059640 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.059650 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.161648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.161695 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.161706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.161723 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.161733 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.263783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.263827 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.263838 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.263855 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.263867 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.366712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.366754 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.366766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.366783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.366793 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.468704 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.468740 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.468750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.468765 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.468776 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.571776 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.571844 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.571857 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.571871 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.571903 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.674403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.674451 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.674477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.674497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.674511 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.696831 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.696862 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:31 crc kubenswrapper[4875]: E1007 07:57:31.696961 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:31 crc kubenswrapper[4875]: E1007 07:57:31.697056 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.777053 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.777094 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.777106 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.777121 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.777131 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.878989 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.879030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.879040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.879054 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.879065 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.980827 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.980858 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.980866 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.980905 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:31 crc kubenswrapper[4875]: I1007 07:57:31.980915 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:31Z","lastTransitionTime":"2025-10-07T07:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.083381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.083420 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.083431 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.083447 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.083458 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.185128 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.185159 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.185167 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.185178 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.185188 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.287376 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.287424 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.287435 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.287449 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.287462 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.390028 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.390103 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.390116 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.390138 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.390149 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.492665 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.492711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.492722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.492739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.492750 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.595584 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.595628 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.595643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.595660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.595672 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.697013 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.697099 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:32 crc kubenswrapper[4875]: E1007 07:57:32.697155 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:32 crc kubenswrapper[4875]: E1007 07:57:32.697323 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.698866 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.698918 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.698927 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.698940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.698951 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.801321 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.801363 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.801373 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.801389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.801399 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.903709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.903757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.903768 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.903784 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:32 crc kubenswrapper[4875]: I1007 07:57:32.903795 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:32Z","lastTransitionTime":"2025-10-07T07:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.005965 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.006028 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.006038 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.006054 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.006067 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.037535 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.037581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.037592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.037608 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.037620 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.049603 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:33Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.053732 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.053756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.053765 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.053779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.053787 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.064941 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:33Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.069696 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.069726 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.069734 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.069747 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.069756 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.081414 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:33Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.084651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.084678 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.084689 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.084702 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.084711 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.096335 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:33Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.100280 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.100306 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.100315 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.100328 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.100337 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.112920 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:33Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.113065 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.114504 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.114553 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.114564 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.114579 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.114593 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.217575 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.217677 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.217706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.217748 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.217778 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.321528 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.321606 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.321626 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.321656 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.321676 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.424817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.424899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.424918 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.424940 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.424955 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.527667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.527717 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.527725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.527744 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.527754 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.630987 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.631047 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.631059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.631106 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.631121 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.697018 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.697076 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.697313 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:33 crc kubenswrapper[4875]: E1007 07:57:33.697451 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.733914 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.733981 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.733992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.734017 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.734028 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.836525 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.836580 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.836593 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.836612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.836631 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.938899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.938948 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.938959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.938976 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:33 crc kubenswrapper[4875]: I1007 07:57:33.938987 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:33Z","lastTransitionTime":"2025-10-07T07:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.041316 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.041365 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.041373 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.041386 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.041395 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.143646 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.143688 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.143699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.143717 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.143729 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.245832 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.245874 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.245907 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.245926 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.245937 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.347973 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.348499 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.348593 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.348690 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.348767 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.451611 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.451845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.451983 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.452092 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.452191 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.554490 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.554803 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.554869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.554972 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.555034 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.657370 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.657406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.657416 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.657433 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.657444 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.696968 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:34 crc kubenswrapper[4875]: E1007 07:57:34.697095 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.697002 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:34 crc kubenswrapper[4875]: E1007 07:57:34.697518 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.759682 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.759713 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.759723 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.759737 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.759747 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.862163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.862529 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.862643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.862756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.862865 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.964548 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.964577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.964587 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.964601 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:34 crc kubenswrapper[4875]: I1007 07:57:34.964611 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:34Z","lastTransitionTime":"2025-10-07T07:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.067004 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.067061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.067073 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.067093 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.067105 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.168801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.168852 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.168870 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.168939 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.168951 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.271072 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.271116 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.271127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.271143 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.271156 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.373408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.373446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.373454 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.373468 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.373479 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.475648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.475688 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.475699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.475714 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.475723 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.578116 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.578154 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.578163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.578177 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.578186 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.680447 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.680479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.680488 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.680502 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.680515 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.697598 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:35 crc kubenswrapper[4875]: E1007 07:57:35.697707 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.697941 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:35 crc kubenswrapper[4875]: E1007 07:57:35.697993 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.709736 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\
"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.723040 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.733371 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.743951 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.756849 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.768658 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782046 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782838 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782936 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782948 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782964 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.782974 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.797301 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.810581 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.821374 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.831053 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.842159 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.853806 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.867113 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.881068 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be4
02e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.884822 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.884860 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.884869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.884899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.884909 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.900367 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.911336 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.928706 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.939494 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:35Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.988248 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.988289 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.988300 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.988315 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:35 crc kubenswrapper[4875]: I1007 07:57:35.988325 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:35Z","lastTransitionTime":"2025-10-07T07:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.090910 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.090979 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.090994 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.091013 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.091053 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.193568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.193615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.193626 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.193646 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.193658 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.296166 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.296205 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.296217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.296236 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.296248 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.399150 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.399213 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.399225 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.399246 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.399260 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.501401 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.501443 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.501459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.501474 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.501484 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.603873 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.603963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.603983 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.604005 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.604021 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.696846 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.696855 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:36 crc kubenswrapper[4875]: E1007 07:57:36.697126 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:36 crc kubenswrapper[4875]: E1007 07:57:36.697469 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.697776 4875 scope.go:117] "RemoveContainer" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.706538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.706572 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.706583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.706598 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.706611 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.809224 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.809576 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.809588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.809606 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.809620 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.912507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.912568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.912578 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.912592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:36 crc kubenswrapper[4875]: I1007 07:57:36.912601 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:36Z","lastTransitionTime":"2025-10-07T07:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.015867 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.015958 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.015971 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.015994 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.016009 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.117961 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.117999 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.118008 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.118024 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.118034 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.194476 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/2.log" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.196886 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.197210 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.208569 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.219586 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.220163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.220186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.220194 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.220207 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.220217 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.237618 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.251600 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.267523 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.278316 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.291206 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.310770 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.322361 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.322388 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.322397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.322410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.322420 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.326189 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.347904 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\
\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, 
SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.361168 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.373261 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.401500 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.421649 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.424992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.425052 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.425061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.425077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.425087 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.437721 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 
07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.446642 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.458354 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.477374 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.488384 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:37Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.527628 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.527672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.527684 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.527704 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.527715 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.629967 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.630000 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.630011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.630024 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.630033 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.696798 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.696937 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:37 crc kubenswrapper[4875]: E1007 07:57:37.696999 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:37 crc kubenswrapper[4875]: E1007 07:57:37.696932 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.732181 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.732228 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.732238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.732254 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.732265 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.834163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.834208 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.834220 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.834241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.834253 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.936768 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.936825 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.936835 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.936850 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:37 crc kubenswrapper[4875]: I1007 07:57:37.936860 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:37Z","lastTransitionTime":"2025-10-07T07:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.038822 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.038860 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.038869 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.038905 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.038916 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.141113 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.141153 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.141163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.141179 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.141191 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.201942 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/3.log" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.202623 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/2.log" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.205185 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" exitCode=1 Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.205229 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.205267 4875 scope.go:117] "RemoveContainer" containerID="75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.205915 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 07:57:38 crc kubenswrapper[4875]: E1007 07:57:38.206069 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.219008 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.230730 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.243303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.243349 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.243363 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.243383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.243395 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.244398 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.261442 4875 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75be2ad687d37c776e821aa271a95c7dda1c9bd5d9c705202fb41c7d27d8c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:07Z\\\",\\\"message\\\":\\\" start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:07Z is after 2025-08-24T17:21:41Z]\\\\nI1007 07:57:07.568842 6518 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"97b6e7b0-06ca-455e-8259-06895040cb0c\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:37Z\\\",\\\"message\\\":\\\"hine-config-controller for endpointslice openshift-machine-config-operator/machine-config-controller-5lh94 as it is not a known egress service\\\\nI1007 07:57:37.500114 6921 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI1007 07:57:37.500150 6921 egressservice_zone_node.go:113] Finished syncing Egress Service node crc: 36.191µs\\\\nI1007 07:57:37.499908 6921 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.499418 6921 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:57:37.499597 6921 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.496449 6921 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 07:57:37.500239 6921 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-config-operator/machine-config-operator for endpointslice openshift-machine-config-operator/machine-config-operator-g8487 as it is not a known egress service\\\\nI1007 07:57:37.500577 6921 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:57:37.500613 6921 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 
07:57:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.271520 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.283778 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac1
17eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.305059 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.318225 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.331123 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.343175 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.345782 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.345817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.345826 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.345846 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.345864 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.354733 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.364734 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.374724 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.386402 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.395392 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.405953 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.415737 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.431056 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.441405 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:38Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.448057 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.448099 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.448110 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.448125 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.448135 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.550322 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.550361 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.550374 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.550389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.550400 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.652284 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.652320 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.652329 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.652342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.652351 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.697209 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.697277 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:38 crc kubenswrapper[4875]: E1007 07:57:38.697407 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:38 crc kubenswrapper[4875]: E1007 07:57:38.697464 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.755039 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.755072 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.755081 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.755094 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.755103 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.857012 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.857058 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.857067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.857082 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.857091 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.959363 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.959402 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.959413 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.959428 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:38 crc kubenswrapper[4875]: I1007 07:57:38.959439 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:38Z","lastTransitionTime":"2025-10-07T07:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.061709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.061756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.061766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.061783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.061795 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.164868 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.164937 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.164947 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.164967 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.164987 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.211164 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/3.log" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.216118 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.216504 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.231012 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.246640 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.261135 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.267600 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.267627 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.267635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.267649 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.267660 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.278407 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:37Z\\\",\\\"message\\\":\\\"hine-config-controller for endpointslice openshift-machine-config-operator/machine-config-controller-5lh94 as it is not a known egress service\\\\nI1007 07:57:37.500114 6921 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI1007 07:57:37.500150 6921 egressservice_zone_node.go:113] Finished syncing Egress Service node crc: 36.191µs\\\\nI1007 07:57:37.499908 6921 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.499418 6921 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:57:37.499597 6921 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.496449 6921 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 07:57:37.500239 6921 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-config-operator/machine-config-operator for endpointslice openshift-machine-config-operator/machine-config-operator-g8487 as it is not a known egress service\\\\nI1007 07:57:37.500577 6921 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:57:37.500613 6921 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:57:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.289301 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.300775 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.319672 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca71
8e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.332289 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.344371 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z 
is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.355211 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.366070 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.369477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.369539 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.369552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.369573 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.369586 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.376246 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.387075 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.399735 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.410779 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.423013 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.433003 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.443215 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.451779 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:39Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.471371 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.471399 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.471407 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.471420 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.471429 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.502727 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.502840 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.502866 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.502933 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.502909773 +0000 UTC m=+148.462680316 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.502981 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503001 4875 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503006 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503031 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503043 4875 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503046 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.503035167 +0000 UTC m=+148.462805710 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503069 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.503015 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503081 4875 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503093 4875 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503107 4875 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503072 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.503064148 +0000 UTC m=+148.462834691 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503167 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.503154641 +0000 UTC m=+148.462925184 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.503187 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.503176961 +0000 UTC m=+148.462947504 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.574186 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.574232 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.574240 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.574256 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.574266 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.676700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.676736 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.676748 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.676763 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.676772 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.697434 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.697486 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.698067 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:39 crc kubenswrapper[4875]: E1007 07:57:39.698128 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.779021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.779067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.779083 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.779100 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.779112 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.882217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.882265 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.882273 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.882290 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.882300 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.984720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.984757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.984766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.984780 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:39 crc kubenswrapper[4875]: I1007 07:57:39.984791 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:39Z","lastTransitionTime":"2025-10-07T07:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.087302 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.087347 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.087358 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.087375 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.087386 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.189212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.189253 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.189262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.189276 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.189286 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.291228 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.291282 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.291294 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.291310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.291321 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.393575 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.393608 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.393618 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.393634 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.393645 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.496499 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.496547 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.496558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.496574 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.496584 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.598956 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.598998 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.599010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.599027 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.599040 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.697119 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.697119 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:40 crc kubenswrapper[4875]: E1007 07:57:40.697246 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:40 crc kubenswrapper[4875]: E1007 07:57:40.697312 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.701850 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.701926 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.701939 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.701959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.701971 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.804590 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.804636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.804650 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.804667 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.804678 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.907126 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.907200 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.907212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.907229 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:40 crc kubenswrapper[4875]: I1007 07:57:40.907261 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:40Z","lastTransitionTime":"2025-10-07T07:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.008783 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.008817 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.008828 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.008846 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.008856 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.111500 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.111561 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.111577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.111595 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.111605 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.217079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.217377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.217386 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.217402 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.217413 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.319570 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.319610 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.319622 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.319638 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.319649 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.421321 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.421359 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.421372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.421391 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.421402 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.523539 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.523571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.523597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.523613 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.523622 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.625346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.625384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.625397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.625415 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.625425 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.697143 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.697452 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:41 crc kubenswrapper[4875]: E1007 07:57:41.697598 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:41 crc kubenswrapper[4875]: E1007 07:57:41.697772 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.727611 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.727645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.727654 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.727668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.727677 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.830758 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.830798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.830810 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.830825 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.830836 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.933330 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.933374 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.933387 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.933403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:41 crc kubenswrapper[4875]: I1007 07:57:41.933414 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:41Z","lastTransitionTime":"2025-10-07T07:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.035965 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.036013 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.036021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.036036 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.036045 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.138175 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.138207 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.138215 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.138229 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.138238 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.240445 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.240479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.240488 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.240500 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.240510 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.342998 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.343067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.343077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.343092 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.343103 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.446294 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.446650 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.446737 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.446841 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.446969 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.549472 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.549505 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.549516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.549532 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.549545 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.651181 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.651239 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.651255 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.651283 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.651298 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.696643 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.696850 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:42 crc kubenswrapper[4875]: E1007 07:57:42.697155 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:42 crc kubenswrapper[4875]: E1007 07:57:42.697299 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.753938 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.753985 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.753994 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.754010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.754021 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.856607 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.856645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.856655 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.856670 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.856679 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.959491 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.959523 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.959531 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.959544 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:42 crc kubenswrapper[4875]: I1007 07:57:42.959555 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:42Z","lastTransitionTime":"2025-10-07T07:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.062109 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.062151 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.062163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.062182 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.062193 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.116819 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.116859 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.116868 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.116899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.116910 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.131409 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.135459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.135503 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.135514 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.135532 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.135544 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.149291 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.153088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.153136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.153149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.153169 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.153185 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.166072 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.169719 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.169750 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.169761 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.169776 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.169787 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.180617 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.184314 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.184349 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.184357 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.184372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.184380 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.195766 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:43Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.195935 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.198072 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.198113 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.198129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.198149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.198166 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.300338 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.300384 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.300395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.300410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.300423 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.402932 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.402973 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.402982 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.402997 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.403006 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.505577 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.505626 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.505636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.505651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.505664 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.608413 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.609044 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.609085 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.609119 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.609134 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.697511 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.697633 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.697523 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:43 crc kubenswrapper[4875]: E1007 07:57:43.697905 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.711346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.711387 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.711398 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.711413 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.711425 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.813585 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.813624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.813636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.813651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.813660 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.915352 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.915395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.915404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.915425 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:43 crc kubenswrapper[4875]: I1007 07:57:43.915442 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:43Z","lastTransitionTime":"2025-10-07T07:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.018173 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.018241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.018254 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.018273 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.018286 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.121697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.121756 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.121766 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.121795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.121809 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.229001 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.229055 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.229074 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.229093 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.229103 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.332564 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.332599 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.332608 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.332624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.332634 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.436095 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.436165 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.436188 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.436216 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.436236 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.539719 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.539779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.539792 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.539814 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.539829 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.643295 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.643364 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.643383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.643408 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.643430 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.697048 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.697069 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:44 crc kubenswrapper[4875]: E1007 07:57:44.697252 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:44 crc kubenswrapper[4875]: E1007 07:57:44.697343 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.746968 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.747061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.747087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.747126 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.747156 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.850340 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.850411 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.850435 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.850464 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.850486 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.952662 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.952700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.952711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.952724 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:44 crc kubenswrapper[4875]: I1007 07:57:44.952733 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:44Z","lastTransitionTime":"2025-10-07T07:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.055638 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.055704 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.055714 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.055727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.055735 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.158089 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.158161 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.158174 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.158193 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.158205 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.260249 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.260281 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.260290 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.260304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.260314 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.363618 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.363659 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.363668 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.363687 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.363697 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.465355 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.465397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.465410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.465427 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.465444 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.568335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.568378 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.568389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.568404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.568415 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.671643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.671697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.671709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.671727 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.671738 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.696744 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:45 crc kubenswrapper[4875]: E1007 07:57:45.696851 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.697016 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:45 crc kubenswrapper[4875]: E1007 07:57:45.697182 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.708551 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.718465 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.728478 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.740477 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.754212 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.771989 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e62
1385a4d8b32ffc0f79ce461f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:37Z\\\",\\\"message\\\":\\\"hine-config-controller for endpointslice openshift-machine-config-operator/machine-config-controller-5lh94 as it is not a known egress service\\\\nI1007 07:57:37.500114 6921 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI1007 07:57:37.500150 6921 egressservice_zone_node.go:113] Finished syncing Egress Service node crc: 36.191µs\\\\nI1007 07:57:37.499908 6921 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.499418 6921 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:57:37.499597 6921 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.496449 6921 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 07:57:37.500239 6921 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-config-operator/machine-config-operator for endpointslice openshift-machine-config-operator/machine-config-operator-g8487 as it is not a known egress service\\\\nI1007 07:57:37.500577 6921 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:57:37.500613 6921 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:57:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.774179 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.774226 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.774237 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.774255 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.774268 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.783605 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.796162 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.817392 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.831602 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.848238 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.860568 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.873675 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.876490 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.876565 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.876763 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.876782 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.876793 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.886012 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.898862 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.915499 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.926442 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.937659 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.948160 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:45Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.979344 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.979382 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.979391 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.979405 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:45 crc kubenswrapper[4875]: I1007 07:57:45.979413 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:45Z","lastTransitionTime":"2025-10-07T07:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.081794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.081837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.081846 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.081863 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.081892 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.183773 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.183821 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.183833 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.183852 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.183864 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.286244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.286316 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.286326 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.286342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.286354 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.388815 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.388862 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.388891 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.388908 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.388918 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.491075 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.491103 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.491111 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.491127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.491150 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.593133 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.593169 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.593178 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.593190 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.593199 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696049 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696110 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696120 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696136 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696146 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696660 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.696727 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:46 crc kubenswrapper[4875]: E1007 07:57:46.696791 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:46 crc kubenswrapper[4875]: E1007 07:57:46.696922 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.798140 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.798184 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.798193 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.798208 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.798217 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.901079 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.901127 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.901138 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.901154 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:46 crc kubenswrapper[4875]: I1007 07:57:46.901165 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:46Z","lastTransitionTime":"2025-10-07T07:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.003479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.003516 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.003524 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.003538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.003547 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.106025 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.106061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.106071 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.106088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.106099 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.208502 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.208539 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.208547 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.208565 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.208578 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.311032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.311091 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.311105 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.311124 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.311138 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.413008 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.413040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.413049 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.413089 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.413099 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.514830 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.514924 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.514938 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.514963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.514976 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.617163 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.617241 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.617270 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.617286 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.617296 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.696778 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.696850 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:47 crc kubenswrapper[4875]: E1007 07:57:47.696985 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:47 crc kubenswrapper[4875]: E1007 07:57:47.697202 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.719352 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.719392 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.719403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.719417 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.719429 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.821941 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.821979 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.821987 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.822000 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.822008 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.924907 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.924950 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.924960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.924977 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:47 crc kubenswrapper[4875]: I1007 07:57:47.924988 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:47Z","lastTransitionTime":"2025-10-07T07:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.027471 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.027507 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.027517 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.027536 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.027547 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.129758 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.130115 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.130220 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.130323 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.130411 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.232639 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.232697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.232706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.232720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.232728 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.336409 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.336695 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.336769 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.336856 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.336964 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.438928 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.439257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.439329 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.439406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.439467 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.542299 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.542367 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.542381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.542406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.542423 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.644206 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.644518 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.644604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.644675 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.644749 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.696721 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.696721 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:48 crc kubenswrapper[4875]: E1007 07:57:48.697013 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:48 crc kubenswrapper[4875]: E1007 07:57:48.697171 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.747194 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.747256 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.747273 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.747302 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.747323 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.850250 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.850339 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.850359 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.850395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.850418 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.953496 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.953550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.953561 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.953581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:48 crc kubenswrapper[4875]: I1007 07:57:48.953592 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:48Z","lastTransitionTime":"2025-10-07T07:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.056259 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.056310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.056321 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.056338 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.056350 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.160023 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.160061 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.160069 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.160084 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.160093 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.263582 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.263626 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.263635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.263650 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.263659 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.366732 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.366837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.366917 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.366969 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.367001 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.469848 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.469930 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.469946 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.469971 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.469990 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.572625 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.572661 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.572669 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.572684 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.572693 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.675166 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.675212 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.675221 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.675237 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.675247 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.696425 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:49 crc kubenswrapper[4875]: E1007 07:57:49.696563 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.696672 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:49 crc kubenswrapper[4875]: E1007 07:57:49.697001 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.778077 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.778120 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.778137 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.778155 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.778167 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.880977 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.881032 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.881043 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.881058 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.881067 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.983444 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.983534 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.983636 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.983711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:49 crc kubenswrapper[4875]: I1007 07:57:49.983740 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:49Z","lastTransitionTime":"2025-10-07T07:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.086698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.086768 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.086779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.086800 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.086813 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.190173 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.190217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.190227 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.190244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.190255 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.293065 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.293153 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.293177 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.293211 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.293235 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.396295 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.396346 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.396358 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.396379 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.396392 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.499059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.499094 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.499102 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.499120 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.499133 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.601021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.601067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.601075 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.601089 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.601097 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.697261 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:50 crc kubenswrapper[4875]: E1007 07:57:50.697388 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.698023 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:50 crc kubenswrapper[4875]: E1007 07:57:50.698188 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.705699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.705742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.705751 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.705767 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.705777 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.809806 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.809852 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.809863 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.809904 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.809920 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.912660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.912706 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.912716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.912733 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:50 crc kubenswrapper[4875]: I1007 07:57:50.912742 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:50Z","lastTransitionTime":"2025-10-07T07:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.015541 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.015609 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.015618 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.015637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.015649 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.118971 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.119013 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.119022 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.119037 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.119049 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.221658 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.221740 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.221757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.221787 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.221805 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.324283 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.324334 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.324349 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.324371 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.324385 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.428132 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.428238 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.428257 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.428288 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.428306 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.530936 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.530988 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.530999 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.531014 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.531025 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.634563 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.634614 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.634624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.634643 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.634654 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.697399 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.697399 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:51 crc kubenswrapper[4875]: E1007 07:57:51.697677 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:51 crc kubenswrapper[4875]: E1007 07:57:51.697777 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.737493 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.737571 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.737588 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.737616 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.737637 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.840697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.840768 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.840788 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.840818 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.840837 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.943818 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.943870 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.943894 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.943914 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:51 crc kubenswrapper[4875]: I1007 07:57:51.943927 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:51Z","lastTransitionTime":"2025-10-07T07:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.046963 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.047039 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.047060 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.047096 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.047121 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.149798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.149887 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.149905 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.149933 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.149950 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.252647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.252718 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.252729 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.252757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.252777 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.355848 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.355956 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.355977 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.356011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.356030 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.460149 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.460217 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.460231 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.460252 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.460268 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.563083 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.563129 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.563139 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.563155 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.563166 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.666774 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.666832 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.666861 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.666904 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.666917 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.697386 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.697410 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:52 crc kubenswrapper[4875]: E1007 07:57:52.697743 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:52 crc kubenswrapper[4875]: E1007 07:57:52.697814 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.769910 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.769970 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.769981 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.770010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.770026 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.873173 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.873211 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.873220 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.873233 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.873242 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.976138 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.976176 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.976184 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.976197 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:52 crc kubenswrapper[4875]: I1007 07:57:52.976206 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:52Z","lastTransitionTime":"2025-10-07T07:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.079374 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.079445 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.079456 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.079475 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.079488 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.182369 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.182427 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.182440 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.182456 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.182466 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.284897 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.284947 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.284959 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.284978 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.284988 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.387537 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.387594 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.387604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.387623 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.387636 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.490739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.490799 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.490813 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.490840 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.490855 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.585133 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.585193 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.585210 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.585239 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.585258 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.600784 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:53Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.606228 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.606271 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.606285 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.606309 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.606327 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.623379 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:53Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.628448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.628544 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.628558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.628579 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.628592 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.645449 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:53Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.650655 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.650722 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.650754 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.650791 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.650818 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.668635 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:53Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.674139 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.674206 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.674226 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.674249 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.674263 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.691639 4875 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e390eb55-bfef-4c82-ba02-53426a3fd939\\\",\\\"systemUUID\\\":\\\"288fc606-a984-4194-ac49-303e4a239cb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:53Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.691915 4875 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.694633 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.694690 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.694709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.694739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.694764 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.697480 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.697629 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.697897 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.698310 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.699374 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 07:57:53 crc kubenswrapper[4875]: E1007 07:57:53.699548 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.798172 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.798254 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.798278 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.798308 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.798334 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.902399 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.902509 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.902528 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.902556 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:53 crc kubenswrapper[4875]: I1007 07:57:53.902572 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:53Z","lastTransitionTime":"2025-10-07T07:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.005015 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.005095 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.005109 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.005148 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.005162 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.108342 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.108414 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.108427 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.108453 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.108470 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.211326 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.211373 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.211383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.211404 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.211417 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.313945 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.314004 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.314019 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.314040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.314074 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.417497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.417541 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.417551 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.417568 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.417579 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.520358 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.520427 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.520446 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.520479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.520502 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.623356 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.623429 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.623451 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.623480 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.623500 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.697619 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.697697 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:54 crc kubenswrapper[4875]: E1007 07:57:54.698009 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:54 crc kubenswrapper[4875]: E1007 07:57:54.698214 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.726617 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.726660 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.726674 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.726700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.726715 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.765396 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:54 crc kubenswrapper[4875]: E1007 07:57:54.765673 4875 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:54 crc kubenswrapper[4875]: E1007 07:57:54.765851 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs podName:dce21abc-1295-4d45-bd26-07b7e37d674c nodeName:}" failed. No retries permitted until 2025-10-07 07:58:58.765805348 +0000 UTC m=+163.725575991 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs") pod "network-metrics-daemon-wk8rw" (UID: "dce21abc-1295-4d45-bd26-07b7e37d674c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.830303 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.830377 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.830402 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.830436 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.830460 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.933873 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.933932 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.933942 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.933958 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:54 crc kubenswrapper[4875]: I1007 07:57:54.933968 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:54Z","lastTransitionTime":"2025-10-07T07:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.036949 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.037042 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.037066 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.037098 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.037122 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.140672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.140741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.140764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.140795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.140813 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.246297 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.246406 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.246443 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.246481 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.246505 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.350497 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.350581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.350597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.350615 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.350638 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.453582 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.453632 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.453648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.453672 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.453687 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.556965 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.557019 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.557031 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.557051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.557063 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.660545 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.660612 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.660640 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.660666 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.660686 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.697815 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.697921 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:55 crc kubenswrapper[4875]: E1007 07:57:55.698063 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:55 crc kubenswrapper[4875]: E1007 07:57:55.698317 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.711635 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01fe5bea18c473e716831e2a9a876a8517d6de78b9b870929ecc1c4397d33fb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.728206 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wc2jq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5a790e1-c591-4cfc-930f-4805a923790b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:24Z\\\",\\\"message\\\":\\\"2025-10-07T07:56:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537\\\\n2025-10-07T07:56:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_790ceaf5-76bc-41c6-a081-e5e3ea810537 to /host/opt/cni/bin/\\\\n2025-10-07T07:56:39Z [verbose] multus-daemon started\\\\n2025-10-07T07:56:39Z [verbose] Readiness Indicator file check\\\\n2025-10-07T07:57:24Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:57:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s782\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wc2jq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.741777 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3928c10c-c3da-41eb-96b2-629d67cfb31f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27e5a1d7a65296f23b224f9ace2760c32b6e1ca146cf7a42a601de22632250d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qd4f6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hx68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.755819 4875 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d636971-3387-4f3c-b4a1-54a1da1e2fbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c836128e294716bdbbc7bf57f81f71a88656e83be509510722d13b3f66e7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ef6aa200be5c0cc9b34435271b54e4852ad9118d7334d57e07c1f6f75100f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q7bd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rbr4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.763381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.763435 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.763455 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.763484 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.763500 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.773863 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01cd6f59-4f16-41d3-b43b-cd3cb9efd9b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca1c70fa4c2dd9e87cc15766e27c2735768f392d019d6004ae50c94595651c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4bed5cf79847998d3d72c69b2f5773d64affb1a1f13ecc3af99bda8307bb8d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba6ab5ec6a70a00059ba119970c6b139879c3c568475f2acf45b00f4ade9e22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e2645f8bd2ed4e578809e1a15c65b5a0bf19a2bc135dd5f626b2b3dfce0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671f4d3e518bf0230af61a6bfc3f70b8cea7f0e8f972ad605a78cded0306b084\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"message\\\":\\\"le observer\\\\nW1007 07:56:35.379471 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 07:56:35.379613 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 07:56:35.380419 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1733405231/tls.crt::/tmp/serving-cert-1733405231/tls.key\\\\\\\"\\\\nI1007 07:56:35.960385 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 07:56:35.963357 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 07:56:35.963380 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 07:56:35.963401 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 07:56:35.963407 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 07:56:35.978178 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 07:56:35.978231 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 07:56:35.978242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1007 07:56:35.978236 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1007 
07:56:35.978254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 07:56:35.978304 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 07:56:35.978312 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 07:56:35.978319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1007 07:56:35.979012 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0b72e1d82ba1cfc118ea122535db5c358f503a17b497c21c481e1b56bb578e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec502d866371b40ecc3c8728889725cc1c2d9c8be18dbce17935992980fc1b9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.784906 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e7e967b-bfbd-4738-9082-7bc94f2f32fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a5c9629b4ec0c85ab80d893dd502732395c3dd8b418a686d3c2ac55e69fa8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1222e78ddf0e6e37cfd1354cd95301c49aac7faf38193b66c6a3b66b0617d018\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.800514 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.816594 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43e570e7b8391b0f38f3228dc0380ac8ee23e83f498989343bdee6a53c3ede41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.835045 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dce21abc-1295-4d45-bd26-07b7e37d674c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jx7vv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wk8rw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.860808 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.865254 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.865333 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.865360 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.865393 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.865418 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.875772 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9tw9m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8b02a8a-b8d2-4097-b768-132e4c46938a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f6efce0afada950650dfd1a7345a5132509ead755458e53915fcf3335ffe73e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9tw9m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.893361 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ed41f5e-e186-4ede-a0a5-d5a015781ba1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:57:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce6527e17c61ccce5c81a87b9a6d3139c9f17a0736c08dfbb8907610d6270adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8f4b6a57610b6440afa3652ea5c2926072ad3bc658c72c30c8d28a2832cf900\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8a810dc9c2d8d92af77e0b4c1a21ab24e7da3de04f9c1b00b8399ec0b2cf4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965fc2ef9b26d12ee6c5e6adb88200cb52574c8f861f08d1f767c23ea861e6ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.911089 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.932703 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7021fc56-b485-4ca6-80e8-56665ade004f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff571e5d1552af3387066929e3c02a939848e65fbf853aec600b03e0f72d5d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d91377f30f2c7676df79d679bad49e0a162667214bd7e6ed4ad8ef3606a8752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecf0876b0824ab4fd9c692b0455050ac0a2d61663f74f13f578aebd6f3869f3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9714afc60a059f6aa35d965ae1e15ea03e01cf9162a44d632ceed9d772e966bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1f6109783534c4c80c81daf9eb0f04a9fbced0b143aeaa0c76683b9d2d38669\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c07a8fe8d39f3de33d3b132c33386a5c63a0948a1205f48e9a8788dd77ba8bc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bce803064d5256265cdaf7f6e16d02780eab07d41d1f547dedd1c1b85972f543\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q24bq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zk2kz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.945083 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ddx6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0605d52-0cfc-4bcf-9218-1991257047cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdc7823b5b4f40adc245f9650864ebcb03f791dcbd6167f41c362bcdcc3a6655\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l9bwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ddx6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 
07:57:55.965420 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62ae4c53-7cf1-4229-84b4-045397dbcfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ae8881986c1b2abd841adfadbac7dc3d73096c5366485350536c08f478f9762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://05f20f2103458077317614bcd5338d803c52c67e7dfc562103c817d33fa5bc79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f2213a20d36b8a125c6004957d5be402e0b85bbc2165ebd964ab499dfffb877\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://841f5c2218ad3912bf23dc2df80f24ca696829ba8e2c2d9722264688b25f0846\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.968318 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.968359 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.968372 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.968395 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.968412 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:55Z","lastTransitionTime":"2025-10-07T07:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:55 crc kubenswrapper[4875]: I1007 07:57:55.988447 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3527b3b9-1734-481d-9d80-9093e388afec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7149243f8781d5a5e845821a8016bc06ec3678d69dfbde72d3429c48c64a98b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d55feee1c9378afae0885cd063f39a2b4bd057db72c776941837d8d4098132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc29635af9843109e3c200fc756d352e1a81b01dcc52a30a005291d1fa9f8d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e0b3573713f1b9d23177e5a1412d8db366ca718e6c19f6af9e234940505f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20382715fd9d79a3b60b16d950d6c4fe5b0d34ae14ad034f684d97762205263a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cfae59bc7d6183b2b5c6c04dd53b1bfc8b62abf90f67f74c7a56bf5039a5b50a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88968e9f639dc3664a87729128c531283b37b7c2da6aae70905f9f63a980fb54\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T07:56:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:17Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae0cbc996e8c2d24e8fbbf34b292d994bcc704c7b27fdc8b7fb7e64be0902ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:55Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.004831 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://455d92e6c0a43eabcbdc94940cb5bb0308a747e311fb79711b5ec8586d2f290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e98da4809cdbbaa0bc7a3ed73df6eeb9260101f8336e16e3e93cba2be423f957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.034931 4875 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7806e48-48e7-4680-af2e-e93a05003370\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T07:57:37Z\\\",\\\"message\\\":\\\"hine-config-controller for endpointslice openshift-machine-config-operator/machine-config-controller-5lh94 as it is not a known egress service\\\\nI1007 07:57:37.500114 6921 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI1007 07:57:37.500150 6921 egressservice_zone_node.go:113] Finished syncing Egress Service node crc: 36.191µs\\\\nI1007 07:57:37.499908 6921 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.499418 6921 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 07:57:37.499597 6921 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 07:57:37.496449 6921 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 07:57:37.500239 6921 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-config-operator/machine-config-operator for endpointslice openshift-machine-config-operator/machine-config-operator-g8487 as it is not a known egress service\\\\nI1007 07:57:37.500577 6921 ovnkube.go:599] Stopped ovnkube\\\\nI1007 07:57:37.500613 6921 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 07:57:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T07:57:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T07:56:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T07:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T07:56:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mq886\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T07:56:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8tcxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T07:57:56Z is after 2025-08-24T17:21:41Z" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.071950 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.072023 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.072043 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.072074 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.072099 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.175354 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.175432 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.175444 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.175463 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.175476 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.278401 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.278487 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.278498 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.278518 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.278530 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.384628 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.384670 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.384680 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.384699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.384709 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.488590 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.488637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.488645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.488663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.488672 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.591821 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.591867 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.591899 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.591920 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.591930 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.695148 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.695204 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.695222 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.695242 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.695255 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.696406 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.696498 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:56 crc kubenswrapper[4875]: E1007 07:57:56.696573 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:56 crc kubenswrapper[4875]: E1007 07:57:56.696658 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.797635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.797714 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.797739 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.797773 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.797800 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.901955 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.902037 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.902059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.902087 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:56 crc kubenswrapper[4875]: I1007 07:57:56.902110 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:56Z","lastTransitionTime":"2025-10-07T07:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.004731 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.004784 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.004795 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.004811 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.004822 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.107679 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.107716 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.107725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.107740 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.107749 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.209925 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.209992 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.210009 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.210035 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.210052 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.313678 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.313733 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.313741 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.313758 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.313769 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.416754 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.416806 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.416815 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.416834 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.416844 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.519304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.519348 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.519361 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.519380 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.519393 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.622247 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.622399 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.622410 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.622433 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.622449 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.697579 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.697579 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:57 crc kubenswrapper[4875]: E1007 07:57:57.697938 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:57 crc kubenswrapper[4875]: E1007 07:57:57.698137 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.724957 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.725040 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.725062 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.725088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.725109 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.827562 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.827596 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.827606 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.827624 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.827636 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.931472 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.931515 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.931525 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.931542 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:57 crc kubenswrapper[4875]: I1007 07:57:57.931555 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:57Z","lastTransitionTime":"2025-10-07T07:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.034381 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.034414 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.034423 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.034440 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.034450 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.137053 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.137111 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.137124 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.137144 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.137171 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.239734 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.239771 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.239779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.239794 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.239805 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.341416 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.341459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.341470 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.341486 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.341497 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.443592 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.443627 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.443635 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.443651 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.443661 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.546637 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.546692 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.546703 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.546720 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.546733 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.650082 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.650122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.650133 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.650151 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.650163 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.696821 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:57:58 crc kubenswrapper[4875]: E1007 07:57:58.697054 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.696855 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:57:58 crc kubenswrapper[4875]: E1007 07:57:58.697684 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.752709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.752768 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.752779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.752801 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.752814 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.856139 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.856206 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.856219 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.856243 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.856258 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.960223 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.960274 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.960286 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.960304 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:58 crc kubenswrapper[4875]: I1007 07:57:58.960317 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:58Z","lastTransitionTime":"2025-10-07T07:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.063772 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.063849 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.063955 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.064034 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.064062 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.167123 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.167204 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.167222 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.167244 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.167258 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.270700 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.270773 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.270812 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.270844 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.270863 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.373508 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.373544 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.373554 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.373567 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.373578 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.476745 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.476789 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.476798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.476816 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.476828 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.579853 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.579930 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.579943 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.579960 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.579969 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.682644 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.682711 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.682729 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.682757 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.682778 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.697053 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.697055 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:57:59 crc kubenswrapper[4875]: E1007 07:57:59.697410 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:57:59 crc kubenswrapper[4875]: E1007 07:57:59.697575 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.786262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.786355 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.786374 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.786837 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.787064 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.890383 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.890502 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.890528 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.890564 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.890587 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.993606 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.993654 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.993664 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.993681 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:57:59 crc kubenswrapper[4875]: I1007 07:57:59.993696 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:57:59Z","lastTransitionTime":"2025-10-07T07:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.096010 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.096088 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.096126 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.096161 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.096186 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.199483 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.199541 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.199555 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.199580 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.199600 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.302321 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.302388 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.302403 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.302451 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.302469 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.405962 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.406015 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.406030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.406051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.406063 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.509397 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.509477 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.509501 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.509536 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.509560 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.613514 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.613905 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.614044 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.614143 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.614225 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.697441 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:00 crc kubenswrapper[4875]: E1007 07:58:00.697588 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.697798 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:00 crc kubenswrapper[4875]: E1007 07:58:00.697955 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.717647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.717712 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.717725 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.717748 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.717763 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.820962 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.821030 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.821045 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.821068 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.821082 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.924360 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.924429 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.924441 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.924466 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:00 crc kubenswrapper[4875]: I1007 07:58:00.924481 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:00Z","lastTransitionTime":"2025-10-07T07:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.027306 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.027353 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.027367 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.027389 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.027404 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.130864 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.131028 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.131051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.131081 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.131101 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.234305 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.234341 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.234350 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.234367 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.234376 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.337137 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.337560 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.337647 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.337742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.337820 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.440898 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.441344 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.441485 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.441583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.441656 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.543961 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.544007 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.544017 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.544037 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.544048 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.647211 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.647311 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.647336 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.647369 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.647399 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.696786 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.696810 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:01 crc kubenswrapper[4875]: E1007 07:58:01.697375 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:01 crc kubenswrapper[4875]: E1007 07:58:01.697538 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.751031 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.751459 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.751549 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.751699 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.751820 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.855552 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.855613 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.855623 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.855645 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.855657 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.958683 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.958733 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.958743 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.958767 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:01 crc kubenswrapper[4875]: I1007 07:58:01.958778 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:01Z","lastTransitionTime":"2025-10-07T07:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.061479 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.061523 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.061532 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.061550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.061561 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.163648 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.163689 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.163697 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.163709 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.163718 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.266962 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.267011 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.267021 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.267037 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.267047 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.370994 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.371048 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.371059 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.371080 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.371093 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.474039 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.474111 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.474123 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.474142 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.474159 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.577604 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.577698 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.577728 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.577764 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.577787 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.681448 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.681538 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.681563 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.681597 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.681622 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.696838 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.696925 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:02 crc kubenswrapper[4875]: E1007 07:58:02.697157 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:02 crc kubenswrapper[4875]: E1007 07:58:02.697264 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.785492 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.785548 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.785558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.785581 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.785599 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.888973 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.889052 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.889078 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.889113 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.889139 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.992492 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.992557 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.992570 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.992594 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:02 crc kubenswrapper[4875]: I1007 07:58:02.992610 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:02Z","lastTransitionTime":"2025-10-07T07:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.096051 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.096134 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.096160 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.096196 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.096218 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.199491 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.199558 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.199583 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.199616 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.199642 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.302335 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.302454 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.302489 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.302524 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.302548 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.405976 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.406067 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.406090 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.406122 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.406148 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.509180 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.509262 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.509278 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.509308 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.509330 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.612686 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.612762 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.612776 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.612798 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.612815 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.696585 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:03 crc kubenswrapper[4875]: E1007 07:58:03.696839 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.696989 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:03 crc kubenswrapper[4875]: E1007 07:58:03.697303 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.715742 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.715806 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.715818 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.715839 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.715852 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.819310 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.819385 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.819414 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.819463 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.819492 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.922550 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.922663 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.922690 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.922729 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:03 crc kubenswrapper[4875]: I1007 07:58:03.922755 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:03Z","lastTransitionTime":"2025-10-07T07:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.025956 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.026007 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.026017 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.026034 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.026046 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:04Z","lastTransitionTime":"2025-10-07T07:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.088779 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.088829 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.088845 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.088866 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.088913 4875 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T07:58:04Z","lastTransitionTime":"2025-10-07T07:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.161835 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc"] Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.162317 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.165621 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.165858 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.166367 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.173308 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.217544 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-zk2kz" podStartSLOduration=88.217519896 podStartE2EDuration="1m28.217519896s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.217221946 +0000 UTC m=+109.176992549" watchObservedRunningTime="2025-10-07 07:58:04.217519896 +0000 UTC m=+109.177290439" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.218049 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-9tw9m" podStartSLOduration=88.218043802 podStartE2EDuration="1m28.218043802s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.195563635 +0000 UTC m=+109.155334178" watchObservedRunningTime="2025-10-07 07:58:04.218043802 +0000 UTC m=+109.177814345" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.248266 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=60.248225932 podStartE2EDuration="1m0.248225932s" podCreationTimestamp="2025-10-07 07:57:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.233104167 +0000 UTC m=+109.192874740" watchObservedRunningTime="2025-10-07 07:58:04.248225932 +0000 UTC m=+109.207996475" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.279509 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53dcf645-ce93-4ba3-be30-a88377490f64-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.279556 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/53dcf645-ce93-4ba3-be30-a88377490f64-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.279582 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/53dcf645-ce93-4ba3-be30-a88377490f64-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.279614 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.279646 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.298733 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ddx6l" podStartSLOduration=88.298717371 podStartE2EDuration="1m28.298717371s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.298028619 +0000 UTC m=+109.257799342" watchObservedRunningTime="2025-10-07 07:58:04.298717371 +0000 UTC m=+109.258487914" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.348006 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=89.347991242 podStartE2EDuration="1m29.347991242s" podCreationTimestamp="2025-10-07 07:56:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.347743854 +0000 UTC m=+109.307514417" watchObservedRunningTime="2025-10-07 07:58:04.347991242 +0000 UTC m=+109.307761785" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.348517 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.348513578 podStartE2EDuration="1m28.348513578s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.327010861 +0000 UTC m=+109.286781414" watchObservedRunningTime="2025-10-07 07:58:04.348513578 +0000 UTC m=+109.308284111" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380460 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380555 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380565 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53dcf645-ce93-4ba3-be30-a88377490f64-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380653 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53dcf645-ce93-4ba3-be30-a88377490f64-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380695 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/53dcf645-ce93-4ba3-be30-a88377490f64-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.380826 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/53dcf645-ce93-4ba3-be30-a88377490f64-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.381742 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/53dcf645-ce93-4ba3-be30-a88377490f64-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.388519 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53dcf645-ce93-4ba3-be30-a88377490f64-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: 
\"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.399466 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53dcf645-ce93-4ba3-be30-a88377490f64-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hkwfc\" (UID: \"53dcf645-ce93-4ba3-be30-a88377490f64\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.420062 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wc2jq" podStartSLOduration=88.420031199 podStartE2EDuration="1m28.420031199s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.406062269 +0000 UTC m=+109.365832822" watchObservedRunningTime="2025-10-07 07:58:04.420031199 +0000 UTC m=+109.379801762" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.444346 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rbr4j" podStartSLOduration=87.444323653 podStartE2EDuration="1m27.444323653s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.437028484 +0000 UTC m=+109.396799037" watchObservedRunningTime="2025-10-07 07:58:04.444323653 +0000 UTC m=+109.404094196" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.445572 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podStartSLOduration=88.445565822 podStartE2EDuration="1m28.445565822s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.420615217 +0000 UTC m=+109.380385760" watchObservedRunningTime="2025-10-07 07:58:04.445565822 +0000 UTC m=+109.405336365" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.456491 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.456454805 podStartE2EDuration="1m28.456454805s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.455934378 +0000 UTC m=+109.415704921" watchObservedRunningTime="2025-10-07 07:58:04.456454805 +0000 UTC m=+109.416225368" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.468192 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=42.468170224 podStartE2EDuration="42.468170224s" podCreationTimestamp="2025-10-07 07:57:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:04.467849834 +0000 UTC m=+109.427620387" watchObservedRunningTime="2025-10-07 07:58:04.468170224 +0000 UTC m=+109.427940767" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.481698 4875 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.697130 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:04 crc kubenswrapper[4875]: I1007 07:58:04.697130 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:04 crc kubenswrapper[4875]: E1007 07:58:04.697438 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:04 crc kubenswrapper[4875]: E1007 07:58:04.697638 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:05 crc kubenswrapper[4875]: I1007 07:58:05.301790 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" event={"ID":"53dcf645-ce93-4ba3-be30-a88377490f64","Type":"ContainerStarted","Data":"fd04f2b880127c6faaa2c67652205d9f9d5aea68ebf0347c7c7181f58e60a8a7"} Oct 07 07:58:05 crc kubenswrapper[4875]: I1007 07:58:05.301904 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" event={"ID":"53dcf645-ce93-4ba3-be30-a88377490f64","Type":"ContainerStarted","Data":"d0b7aa79eff4e98a3931556b6ea88fcc76cc68a307f9dfe78ef9267e9181e9ec"} Oct 07 07:58:05 crc kubenswrapper[4875]: I1007 07:58:05.320110 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hkwfc" podStartSLOduration=89.320070651 podStartE2EDuration="1m29.320070651s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:05.317838721 +0000 UTC m=+110.277609284" watchObservedRunningTime="2025-10-07 07:58:05.320070651 +0000 UTC m=+110.279841224" Oct 07 07:58:05 crc kubenswrapper[4875]: I1007 07:58:05.697123 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:05 crc kubenswrapper[4875]: I1007 07:58:05.697152 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:05 crc kubenswrapper[4875]: E1007 07:58:05.698665 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:05 crc kubenswrapper[4875]: E1007 07:58:05.698850 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:06 crc kubenswrapper[4875]: I1007 07:58:06.697184 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:06 crc kubenswrapper[4875]: I1007 07:58:06.697200 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:06 crc kubenswrapper[4875]: E1007 07:58:06.697512 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:06 crc kubenswrapper[4875]: E1007 07:58:06.697869 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:06 crc kubenswrapper[4875]: I1007 07:58:06.698928 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 07:58:06 crc kubenswrapper[4875]: E1007 07:58:06.699091 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8tcxj_openshift-ovn-kubernetes(f7806e48-48e7-4680-af2e-e93a05003370)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" Oct 07 07:58:07 crc kubenswrapper[4875]: I1007 07:58:07.696855 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:07 crc kubenswrapper[4875]: I1007 07:58:07.696889 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:07 crc kubenswrapper[4875]: E1007 07:58:07.697054 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:07 crc kubenswrapper[4875]: E1007 07:58:07.697197 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:08 crc kubenswrapper[4875]: I1007 07:58:08.697172 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:08 crc kubenswrapper[4875]: I1007 07:58:08.697253 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:08 crc kubenswrapper[4875]: E1007 07:58:08.697293 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:08 crc kubenswrapper[4875]: E1007 07:58:08.697334 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:09 crc kubenswrapper[4875]: I1007 07:58:09.697180 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:09 crc kubenswrapper[4875]: I1007 07:58:09.697374 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:09 crc kubenswrapper[4875]: E1007 07:58:09.697483 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:09 crc kubenswrapper[4875]: E1007 07:58:09.697584 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:10 crc kubenswrapper[4875]: I1007 07:58:10.697479 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:10 crc kubenswrapper[4875]: I1007 07:58:10.697575 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:10 crc kubenswrapper[4875]: E1007 07:58:10.697702 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:10 crc kubenswrapper[4875]: E1007 07:58:10.697829 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.325836 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/1.log" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.326817 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/0.log" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.326911 4875 generic.go:334] "Generic (PLEG): container finished" podID="e5a790e1-c591-4cfc-930f-4805a923790b" containerID="512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977" exitCode=1 Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.326965 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerDied","Data":"512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977"} Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.327028 4875 scope.go:117] "RemoveContainer" containerID="0fda9fd366641ca5799c0ca6c7aa926af6b41609e157c11a15b2077d4a5ce3aa" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.327614 4875 scope.go:117] "RemoveContainer" containerID="512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977" Oct 07 07:58:11 crc kubenswrapper[4875]: E1007 07:58:11.327834 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wc2jq_openshift-multus(e5a790e1-c591-4cfc-930f-4805a923790b)\"" pod="openshift-multus/multus-wc2jq" podUID="e5a790e1-c591-4cfc-930f-4805a923790b" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.697345 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:11 crc kubenswrapper[4875]: I1007 07:58:11.697455 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:11 crc kubenswrapper[4875]: E1007 07:58:11.697470 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:11 crc kubenswrapper[4875]: E1007 07:58:11.698195 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:12 crc kubenswrapper[4875]: I1007 07:58:12.330773 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/1.log" Oct 07 07:58:12 crc kubenswrapper[4875]: I1007 07:58:12.697280 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:12 crc kubenswrapper[4875]: I1007 07:58:12.697628 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:12 crc kubenswrapper[4875]: E1007 07:58:12.697736 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:12 crc kubenswrapper[4875]: E1007 07:58:12.697857 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:13 crc kubenswrapper[4875]: I1007 07:58:13.696692 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:13 crc kubenswrapper[4875]: I1007 07:58:13.696716 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:13 crc kubenswrapper[4875]: E1007 07:58:13.696823 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:13 crc kubenswrapper[4875]: E1007 07:58:13.696913 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:14 crc kubenswrapper[4875]: I1007 07:58:14.697501 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:14 crc kubenswrapper[4875]: I1007 07:58:14.697620 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:14 crc kubenswrapper[4875]: E1007 07:58:14.697701 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:14 crc kubenswrapper[4875]: E1007 07:58:14.697865 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:15 crc kubenswrapper[4875]: E1007 07:58:15.670331 4875 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 07 07:58:15 crc kubenswrapper[4875]: I1007 07:58:15.696496 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:15 crc kubenswrapper[4875]: I1007 07:58:15.696637 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:15 crc kubenswrapper[4875]: E1007 07:58:15.697945 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:15 crc kubenswrapper[4875]: E1007 07:58:15.698238 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:15 crc kubenswrapper[4875]: E1007 07:58:15.799036 4875 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 07:58:16 crc kubenswrapper[4875]: I1007 07:58:16.696387 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:16 crc kubenswrapper[4875]: I1007 07:58:16.696426 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:16 crc kubenswrapper[4875]: E1007 07:58:16.696514 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:16 crc kubenswrapper[4875]: E1007 07:58:16.696566 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:17 crc kubenswrapper[4875]: I1007 07:58:17.697252 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:17 crc kubenswrapper[4875]: I1007 07:58:17.697422 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:17 crc kubenswrapper[4875]: E1007 07:58:17.697643 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:17 crc kubenswrapper[4875]: E1007 07:58:17.697789 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:18 crc kubenswrapper[4875]: I1007 07:58:18.697040 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:18 crc kubenswrapper[4875]: E1007 07:58:18.697181 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:18 crc kubenswrapper[4875]: I1007 07:58:18.697254 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:18 crc kubenswrapper[4875]: E1007 07:58:18.697380 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:19 crc kubenswrapper[4875]: I1007 07:58:19.697218 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:19 crc kubenswrapper[4875]: I1007 07:58:19.697285 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:19 crc kubenswrapper[4875]: E1007 07:58:19.697438 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:19 crc kubenswrapper[4875]: E1007 07:58:19.697656 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:20 crc kubenswrapper[4875]: I1007 07:58:20.696843 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:20 crc kubenswrapper[4875]: I1007 07:58:20.696939 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:20 crc kubenswrapper[4875]: E1007 07:58:20.697052 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:20 crc kubenswrapper[4875]: E1007 07:58:20.697131 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:20 crc kubenswrapper[4875]: E1007 07:58:20.800415 4875 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 07:58:21 crc kubenswrapper[4875]: I1007 07:58:21.697198 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:21 crc kubenswrapper[4875]: I1007 07:58:21.697242 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:21 crc kubenswrapper[4875]: E1007 07:58:21.697413 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:21 crc kubenswrapper[4875]: E1007 07:58:21.697548 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:21 crc kubenswrapper[4875]: I1007 07:58:21.698445 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.366497 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/3.log" Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.370197 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerStarted","Data":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.370746 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.399582 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podStartSLOduration=106.399565702 podStartE2EDuration="1m46.399565702s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:22.397453995 +0000 UTC m=+127.357224548" watchObservedRunningTime="2025-10-07 07:58:22.399565702 +0000 UTC m=+127.359336245" Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.624041 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-wk8rw"] Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.624155 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:22 crc kubenswrapper[4875]: E1007 07:58:22.624239 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:22 crc kubenswrapper[4875]: I1007 07:58:22.696553 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:22 crc kubenswrapper[4875]: E1007 07:58:22.696900 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:23 crc kubenswrapper[4875]: I1007 07:58:23.697403 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:23 crc kubenswrapper[4875]: I1007 07:58:23.697510 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:23 crc kubenswrapper[4875]: E1007 07:58:23.697722 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:23 crc kubenswrapper[4875]: E1007 07:58:23.698013 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:24 crc kubenswrapper[4875]: I1007 07:58:24.696670 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:24 crc kubenswrapper[4875]: I1007 07:58:24.696670 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:24 crc kubenswrapper[4875]: I1007 07:58:24.697001 4875 scope.go:117] "RemoveContainer" containerID="512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977" Oct 07 07:58:24 crc kubenswrapper[4875]: E1007 07:58:24.696995 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:24 crc kubenswrapper[4875]: E1007 07:58:24.697094 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:25 crc kubenswrapper[4875]: I1007 07:58:25.382966 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/1.log" Oct 07 07:58:25 crc kubenswrapper[4875]: I1007 07:58:25.383241 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerStarted","Data":"84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352"} Oct 07 07:58:25 crc kubenswrapper[4875]: I1007 07:58:25.696494 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:25 crc kubenswrapper[4875]: I1007 07:58:25.696561 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:25 crc kubenswrapper[4875]: E1007 07:58:25.697462 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:25 crc kubenswrapper[4875]: E1007 07:58:25.697657 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:25 crc kubenswrapper[4875]: E1007 07:58:25.803580 4875 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 07:58:26 crc kubenswrapper[4875]: I1007 07:58:26.696976 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:26 crc kubenswrapper[4875]: I1007 07:58:26.697377 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:26 crc kubenswrapper[4875]: E1007 07:58:26.697558 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:26 crc kubenswrapper[4875]: E1007 07:58:26.698280 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:27 crc kubenswrapper[4875]: I1007 07:58:27.696859 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:27 crc kubenswrapper[4875]: I1007 07:58:27.697042 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:27 crc kubenswrapper[4875]: E1007 07:58:27.697543 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:27 crc kubenswrapper[4875]: E1007 07:58:27.697659 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:28 crc kubenswrapper[4875]: I1007 07:58:28.697143 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:28 crc kubenswrapper[4875]: I1007 07:58:28.697182 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:28 crc kubenswrapper[4875]: E1007 07:58:28.697291 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:28 crc kubenswrapper[4875]: E1007 07:58:28.697373 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:29 crc kubenswrapper[4875]: I1007 07:58:29.696858 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:29 crc kubenswrapper[4875]: I1007 07:58:29.696963 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:29 crc kubenswrapper[4875]: E1007 07:58:29.697013 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 07:58:29 crc kubenswrapper[4875]: E1007 07:58:29.697074 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 07:58:30 crc kubenswrapper[4875]: I1007 07:58:30.697442 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:30 crc kubenswrapper[4875]: I1007 07:58:30.697524 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:30 crc kubenswrapper[4875]: E1007 07:58:30.697636 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 07:58:30 crc kubenswrapper[4875]: E1007 07:58:30.697722 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wk8rw" podUID="dce21abc-1295-4d45-bd26-07b7e37d674c" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.697014 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.697024 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.699704 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.701297 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.701613 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 07 07:58:31 crc kubenswrapper[4875]: I1007 07:58:31.701670 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 07 07:58:32 crc kubenswrapper[4875]: I1007 07:58:32.697269 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:32 crc kubenswrapper[4875]: I1007 07:58:32.697281 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:32 crc kubenswrapper[4875]: I1007 07:58:32.699189 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 07 07:58:32 crc kubenswrapper[4875]: I1007 07:58:32.700140 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.408938 4875 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.447035 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zpc28"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.447544 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.450842 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.450917 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7clsb"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.450991 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.451105 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.451539 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.452299 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.452414 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.452475 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.453418 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.453813 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.454976 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.455066 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.455839 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.456228 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.456675 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.459353 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.460471 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.461144 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.461744 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.472482 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.472718 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.472903 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.472941 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473035 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 07 07:58:34 crc kubenswrapper[4875]: W1007 07:58:34.473055 4875 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 07:58:34 crc kubenswrapper[4875]: E1007 07:58:34.473084 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no 
relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473119 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473138 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473198 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473249 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473308 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473367 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: W1007 07:58:34.473409 4875 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 07:58:34 crc kubenswrapper[4875]: E1007 07:58:34.473449 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:58:34 crc kubenswrapper[4875]: W1007 07:58:34.473471 4875 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473493 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 07 07:58:34 crc kubenswrapper[4875]: E1007 07:58:34.473494 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473599 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473666 4875 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473682 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: W1007 07:58:34.473743 4875 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-error": failed to list *v1.Secret: secrets "v4-0-config-user-template-error" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Oct 07 07:58:34 crc kubenswrapper[4875]: E1007 07:58:34.473765 4875 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-error\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-error\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473751 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473783 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473851 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473932 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473938 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473954 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.473990 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474025 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474057 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474122 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474123 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474187 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.474282 4875 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.477158 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.478304 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.480085 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.480296 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.482245 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rtps2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.482595 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.482852 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.483112 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.483511 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.484049 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.486916 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.486959 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjnmj"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.505166 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.506332 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.508316 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4fthd"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.509264 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.520817 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.521020 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.521704 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.522353 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.522521 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.522757 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.522855 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.522944 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523105 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523171 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523225 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523287 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523297 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523380 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523457 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523561 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523461 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.524039 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: 
I1007 07:58:34.524184 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.523289 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.524273 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.524361 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.524612 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.526713 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.526895 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.527000 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.528519 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.528931 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.529585 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zpc28"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.530780 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.531158 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.531903 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.529839 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.532472 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.532925 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.533437 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.533987 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pxkvz"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.534650 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.536867 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.537438 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.537855 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.538795 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.539385 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.539549 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.539698 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.539711 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.540124 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.540511 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.541002 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.541526 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542018 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542178 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542512 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542620 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 07 07:58:34 crc 
kubenswrapper[4875]: I1007 07:58:34.542717 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542821 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.542924 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543015 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543103 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543134 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543192 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543361 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543464 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543692 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.543910 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.545357 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.545373 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.545630 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.549014 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cr4tx"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.549440 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-649xl"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.549787 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.550091 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.550779 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.553980 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.554327 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.563256 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.565912 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.568413 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.569538 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.598169 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.599148 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.599738 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.600165 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.600662 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wpqln"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601030 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601062 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-67qrc"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601073 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601153 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601433 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601572 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.601737 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.602061 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8r27m"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.602487 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.602561 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.603116 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.603862 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.605633 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.606013 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.608897 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.611470 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.612094 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.612334 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.612733 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.614632 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.615076 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.615084 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-vpcnf"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.615680 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.616059 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.616442 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.617908 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.618415 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.618842 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.618742 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.620242 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7clsb"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.621042 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cx82k"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.621829 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.622145 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-rffld"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.622341 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.622854 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.623498 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.625160 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvf2h\" (UniqueName: \"kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.625183 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.625238 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4fthd"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.625190 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.625774 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-auth-proxy-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626499 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgsng\" (UniqueName: \"kubernetes.io/projected/ddc50d0d-840b-41e8-821f-d4e97916051a-kube-api-access-hgsng\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626535 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626570 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626605 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626624 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk4gk\" (UniqueName: \"kubernetes.io/projected/ebf98430-6317-40a9-be26-84a817cfbf1e-kube-api-access-kk4gk\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626644 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-encryption-config\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626660 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-audit-dir\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626676 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626694 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626712 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626734 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-images\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626753 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-profile-collector-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626768 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-image-import-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626785 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad43d1f1-e9d0-400f-87fc-d397aebd5473-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626803 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-serving-cert\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626823 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebf98430-6317-40a9-be26-84a817cfbf1e-serving-cert\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626840 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlw97\" (UniqueName: \"kubernetes.io/projected/5679969e-90bf-49f0-b478-7312b6e13a05-kube-api-access-nlw97\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626858 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626894 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626911 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") 
pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626927 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-serving-cert\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626942 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ngbq\" (UniqueName: \"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-kube-api-access-5ngbq\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626959 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-encryption-config\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626975 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626992 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpdw7\" (UniqueName: \"kubernetes.io/projected/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-kube-api-access-lpdw7\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627010 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6qq9\" (UniqueName: \"kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627024 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg8wx\" (UniqueName: \"kubernetes.io/projected/0d7070c6-e24b-4b05-b574-92b15066833c-kube-api-access-tg8wx\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627040 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-config\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627054 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkh8d\" (UniqueName: \"kubernetes.io/projected/97431ef0-70e0-4c93-9ebc-ba3c9823685f-kube-api-access-lkh8d\") pod \"downloads-7954f5f757-4fthd\" (UID: \"97431ef0-70e0-4c93-9ebc-ba3c9823685f\") " pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627069 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627084 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-node-pullsecrets\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627099 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627115 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627131 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq67v\" (UniqueName: \"kubernetes.io/projected/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-kube-api-access-rq67v\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627161 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlqcw\" (UniqueName: \"kubernetes.io/projected/e612704a-aea6-48f6-82c1-cee4e0e77859-kube-api-access-tlqcw\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627175 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627191 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/67bb7921-f940-46a4-80c8-aa1aeae3b33b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627212 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc50d0d-840b-41e8-821f-d4e97916051a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627228 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627244 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627276 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4e8107af-7b4e-44e0-88f3-47fa08be03fa-proxy-tls\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627291 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627309 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627323 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ace64483-508c-4ffc-9519-0186712eea32-machine-approver-tls\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627339 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627353 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67bb7921-f940-46a4-80c8-aa1aeae3b33b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627366 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-trusted-ca\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627382 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gblh2\" (UniqueName: \"kubernetes.io/projected/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-kube-api-access-gblh2\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627396 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627411 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-images\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627426 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627440 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-config\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627455 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-846sr\" (UniqueName: \"kubernetes.io/projected/51cad0ca-3fc8-486e-a7e9-05c470121cb8-kube-api-access-846sr\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627469 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627482 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627499 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-client\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627515 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-serving-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627529 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51cad0ca-3fc8-486e-a7e9-05c470121cb8-serving-cert\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627544 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627559 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-config\") pod 
\"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627573 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad43d1f1-e9d0-400f-87fc-d397aebd5473-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627588 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-policies\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627604 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627623 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627648 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/550c8dbf-53c3-44c9-87c8-d7bc275f384e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627664 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67bb7921-f940-46a4-80c8-aa1aeae3b33b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627682 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627695 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-audit\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627709 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627723 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627740 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627755 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627773 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5679969e-90bf-49f0-b478-7312b6e13a05-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627788 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkzx6\" (UniqueName: \"kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627804 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxqkw\" (UniqueName: \"kubernetes.io/projected/ace64483-508c-4ffc-9519-0186712eea32-kube-api-access-zxqkw\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627818 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq7bb\" (UniqueName: \"kubernetes.io/projected/550c8dbf-53c3-44c9-87c8-d7bc275f384e-kube-api-access-sq7bb\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627833 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627848 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.626170 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627864 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-srv-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627894 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627909 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zsbf\" (UniqueName: \"kubernetes.io/projected/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-kube-api-access-7zsbf\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627926 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-config\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627940 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-dir\") pod 
\"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627956 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxwxx\" (UniqueName: \"kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.627988 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rtps2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628013 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628028 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628408 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv9n5\" (UniqueName: \"kubernetes.io/projected/4e8107af-7b4e-44e0-88f3-47fa08be03fa-kube-api-access-wv9n5\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628487 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-client\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628519 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.628663 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.629180 4875 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.629827 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.630561 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.631901 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.633480 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.635257 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cr4tx"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.635452 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wpqln"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.637156 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.637853 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-649xl"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.642491 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.645264 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.646816 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.649219 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.650177 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.654172 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8r27m"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.656389 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.658268 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.659574 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.660994 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjnmj"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.661985 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.663359 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.664631 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pxkvz"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.666118 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-67qrc"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.667649 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.668755 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.669542 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.669955 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.671081 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.672464 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rffld"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.673927 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.675293 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.676917 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.678289 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-q7gd8"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.679003 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.679443 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5khj4"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.680351 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.680911 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q7gd8"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.682205 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5khj4"] Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.690291 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.709472 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729502 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729553 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729581 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729598 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-config\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729615 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad43d1f1-e9d0-400f-87fc-d397aebd5473-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729634 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67bb7921-f940-46a4-80c8-aa1aeae3b33b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729652 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-audit\") pod 
\"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729668 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729686 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b3dea818-360c-47a0-8b92-48cb0ab39cb4-metrics-tls\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729702 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5679969e-90bf-49f0-b478-7312b6e13a05-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729719 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkzx6\" (UniqueName: \"kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729736 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxqkw\" (UniqueName: \"kubernetes.io/projected/ace64483-508c-4ffc-9519-0186712eea32-kube-api-access-zxqkw\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729756 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729787 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-srv-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729802 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729819 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-dir\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729836 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxwxx\" (UniqueName: \"kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729852 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729894 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zsbf\" (UniqueName: \"kubernetes.io/projected/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-kube-api-access-7zsbf\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729910 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729934 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e36610d-9f23-4c7e-9980-fad03a7392d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.729962 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 
07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730017 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-client\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730043 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730075 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730095 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3dea818-360c-47a0-8b92-48cb0ab39cb4-trusted-ca\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730123 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvf2h\" (UniqueName: \"kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730147 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730170 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk4gk\" (UniqueName: \"kubernetes.io/projected/ebf98430-6317-40a9-be26-84a817cfbf1e-kube-api-access-kk4gk\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730202 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-encryption-config\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730225 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730251 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh86g\" (UniqueName: \"kubernetes.io/projected/eb7d772c-6ae9-4793-8727-653f80c2d8a0-kube-api-access-vh86g\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730275 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730296 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e36610d-9f23-4c7e-9980-fad03a7392d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730320 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-profile-collector-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730343 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebf98430-6317-40a9-be26-84a817cfbf1e-serving-cert\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730365 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlw97\" (UniqueName: \"kubernetes.io/projected/5679969e-90bf-49f0-b478-7312b6e13a05-kube-api-access-nlw97\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730387 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730415 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730447 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b405a105-6441-41b4-90c2-ee4f6e07fb68-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730469 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt7wr\" (UniqueName: \"kubernetes.io/projected/b405a105-6441-41b4-90c2-ee4f6e07fb68-kube-api-access-kt7wr\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730499 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730529 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpdw7\" (UniqueName: \"kubernetes.io/projected/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-kube-api-access-lpdw7\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730554 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkh8d\" (UniqueName: \"kubernetes.io/projected/97431ef0-70e0-4c93-9ebc-ba3c9823685f-kube-api-access-lkh8d\") pod \"downloads-7954f5f757-4fthd\" (UID: \"97431ef0-70e0-4c93-9ebc-ba3c9823685f\") " pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-node-pullsecrets\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730598 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730622 4875 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-rq67v\" (UniqueName: \"kubernetes.io/projected/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-kube-api-access-rq67v\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730645 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b405a105-6441-41b4-90c2-ee4f6e07fb68-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730682 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730722 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730751 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc50d0d-840b-41e8-821f-d4e97916051a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730786 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2pvh\" (UniqueName: \"kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730822 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4e8107af-7b4e-44e0-88f3-47fa08be03fa-proxy-tls\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730859 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730913 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ace64483-508c-4ffc-9519-0186712eea32-machine-approver-tls\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730940 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730940 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-dir\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730967 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9prd\" (UniqueName: \"kubernetes.io/projected/0ee20e7b-f9fe-4717-bad6-16ca5936e100-kube-api-access-c9prd\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.730995 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gblh2\" (UniqueName: \"kubernetes.io/projected/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-kube-api-access-gblh2\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731020 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-images\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731042 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731066 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-config\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731077 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-config\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" 
Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731090 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-trusted-ca\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731115 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731139 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731161 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-serving-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731219 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51cad0ca-3fc8-486e-a7e9-05c470121cb8-serving-cert\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731248 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-846sr\" (UniqueName: \"kubernetes.io/projected/51cad0ca-3fc8-486e-a7e9-05c470121cb8-kube-api-access-846sr\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731272 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731294 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6j9v\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-kube-api-access-m6j9v\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731319 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-policies\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731343 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731369 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/550c8dbf-53c3-44c9-87c8-d7bc275f384e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731395 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731416 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731442 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731466 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731487 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq7bb\" (UniqueName: \"kubernetes.io/projected/550c8dbf-53c3-44c9-87c8-d7bc275f384e-kube-api-access-sq7bb\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-config\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731516 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731544 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv9n5\" (UniqueName: \"kubernetes.io/projected/4e8107af-7b4e-44e0-88f3-47fa08be03fa-kube-api-access-wv9n5\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731570 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731580 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.731636 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.732001 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.732657 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.732705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-config\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.733352 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-config\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.733595 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.734144 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-serving-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.736819 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737130 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737282 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5679969e-90bf-49f0-b478-7312b6e13a05-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737307 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad43d1f1-e9d0-400f-87fc-d397aebd5473-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737793 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737969 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: 
\"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.737988 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51cad0ca-3fc8-486e-a7e9-05c470121cb8-serving-cert\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738132 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-node-pullsecrets\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738317 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738703 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/550c8dbf-53c3-44c9-87c8-d7bc275f384e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738856 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738866 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-audit\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.738947 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ace64483-508c-4ffc-9519-0186712eea32-machine-approver-tls\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739050 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-auth-proxy-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739072 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739094 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739119 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgsng\" (UniqueName: \"kubernetes.io/projected/ddc50d0d-840b-41e8-821f-d4e97916051a-kube-api-access-hgsng\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739142 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739160 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739179 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739200 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-audit-dir\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739219 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-images\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739238 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn2tf\" (UniqueName: 
\"kubernetes.io/projected/703e43ba-ed43-4ffc-92e3-0063568dcefc-kube-api-access-nn2tf\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739261 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-image-import-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739281 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad43d1f1-e9d0-400f-87fc-d397aebd5473-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739298 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-serving-cert\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739316 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9mfz\" (UniqueName: \"kubernetes.io/projected/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-kube-api-access-q9mfz\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739362 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-audit-policies\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739420 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e36610d-9f23-4c7e-9980-fad03a7392d9-config\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739454 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb7d772c-6ae9-4793-8727-653f80c2d8a0-proxy-tls\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739484 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-serving-cert\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " 
pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739515 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ngbq\" (UniqueName: \"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-kube-api-access-5ngbq\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739544 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-encryption-config\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739572 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739601 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739629 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739656 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg8wx\" (UniqueName: \"kubernetes.io/projected/0d7070c6-e24b-4b05-b574-92b15066833c-kube-api-access-tg8wx\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739683 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-config\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739707 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 
07:58:34.739731 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6qq9\" (UniqueName: \"kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739754 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739783 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb7d772c-6ae9-4793-8727-653f80c2d8a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739810 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlqcw\" (UniqueName: \"kubernetes.io/projected/e612704a-aea6-48f6-82c1-cee4e0e77859-kube-api-access-tlqcw\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739839 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/67bb7921-f940-46a4-80c8-aa1aeae3b33b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739863 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739904 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739920 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739952 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.739977 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f363016c-d18e-4d16-96e3-35b871d6f130-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740000 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740031 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740054 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67bb7921-f940-46a4-80c8-aa1aeae3b33b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740176 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740306 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740339 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740367 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-client\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.740803 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67bb7921-f940-46a4-80c8-aa1aeae3b33b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.741166 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.741261 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4e8107af-7b4e-44e0-88f3-47fa08be03fa-images\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.741721 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.741851 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67bb7921-f940-46a4-80c8-aa1aeae3b33b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.741997 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.742044 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e612704a-aea6-48f6-82c1-cee4e0e77859-audit-dir\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.742400 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 
crc kubenswrapper[4875]: I1007 07:58:34.742464 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-serving-cert\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.742698 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.742889 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.742916 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743268 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743302 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743529 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-images\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743601 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743619 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5679969e-90bf-49f0-b478-7312b6e13a05-config\") pod 
\"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.743858 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ace64483-508c-4ffc-9519-0186712eea32-auth-proxy-config\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744124 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744168 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744219 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744409 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744428 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ebf98430-6317-40a9-be26-84a817cfbf1e-trusted-ca\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744491 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.744625 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-encryption-config\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745128 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/e612704a-aea6-48f6-82c1-cee4e0e77859-image-import-ca\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745231 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-etcd-client\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745262 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745365 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745607 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e612704a-aea6-48f6-82c1-cee4e0e77859-serving-cert\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745718 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cad0ca-3fc8-486e-a7e9-05c470121cb8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.745789 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebf98430-6317-40a9-be26-84a817cfbf1e-serving-cert\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.746109 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.746797 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad43d1f1-e9d0-400f-87fc-d397aebd5473-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.747447 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-etcd-client\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.747557 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.747572 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.747992 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-encryption-config\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.748107 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.748976 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4e8107af-7b4e-44e0-88f3-47fa08be03fa-proxy-tls\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.749418 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.750501 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.765764 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-srv-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.769796 4875 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.782516 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0d7070c6-e24b-4b05-b574-92b15066833c-profile-collector-cert\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.791187 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.810645 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.816436 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddc50d0d-840b-41e8-821f-d4e97916051a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3dea818-360c-47a0-8b92-48cb0ab39cb4-trusted-ca\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841262 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh86g\" (UniqueName: \"kubernetes.io/projected/eb7d772c-6ae9-4793-8727-653f80c2d8a0-kube-api-access-vh86g\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841283 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841312 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e36610d-9f23-4c7e-9980-fad03a7392d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841349 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b405a105-6441-41b4-90c2-ee4f6e07fb68-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841369 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt7wr\" (UniqueName: \"kubernetes.io/projected/b405a105-6441-41b4-90c2-ee4f6e07fb68-kube-api-access-kt7wr\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841424 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b405a105-6441-41b4-90c2-ee4f6e07fb68-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841459 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841476 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2pvh\" (UniqueName: \"kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841501 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9prd\" (UniqueName: \"kubernetes.io/projected/0ee20e7b-f9fe-4717-bad6-16ca5936e100-kube-api-access-c9prd\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841534 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6j9v\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-kube-api-access-m6j9v\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841587 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841604 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 
07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841634 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn2tf\" (UniqueName: \"kubernetes.io/projected/703e43ba-ed43-4ffc-92e3-0063568dcefc-kube-api-access-nn2tf\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841655 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9mfz\" (UniqueName: \"kubernetes.io/projected/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-kube-api-access-q9mfz\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841671 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e36610d-9f23-4c7e-9980-fad03a7392d9-config\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841687 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb7d772c-6ae9-4793-8727-653f80c2d8a0-proxy-tls\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841718 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841734 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841760 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb7d772c-6ae9-4793-8727-653f80c2d8a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841791 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841806 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f363016c-d18e-4d16-96e3-35b871d6f130-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841820 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841839 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841860 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841906 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841925 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b3dea818-360c-47a0-8b92-48cb0ab39cb4-metrics-tls\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841979 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e36610d-9f23-4c7e-9980-fad03a7392d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.841998 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.842574 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb7d772c-6ae9-4793-8727-653f80c2d8a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-649xl\" (UID: 
\"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.845717 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.849995 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.869842 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.889437 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.909923 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.929935 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.949791 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.969565 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 07 07:58:34 crc kubenswrapper[4875]: I1007 07:58:34.990552 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.011027 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.015252 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb7d772c-6ae9-4793-8727-653f80c2d8a0-proxy-tls\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.030278 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.049938 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.071068 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.090616 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.112116 4875 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.131602 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.150471 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.170731 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.190650 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.209838 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.231685 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.250405 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.271517 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.291093 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.310630 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.330687 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.351039 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.370926 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.390753 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.410271 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.431526 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.451925 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.473551 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.491363 4875 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.511400 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.530749 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.551356 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.570873 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.591068 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.608212 4875 request.go:700] Waited for 1.004881259s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/secrets?fieldSelector=metadata.name%3Dopenshift-controller-manager-operator-dockercfg-vw8fw&limit=500&resourceVersion=0 Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.610871 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.630535 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.638414 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b405a105-6441-41b4-90c2-ee4f6e07fb68-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.651214 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.652508 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b405a105-6441-41b4-90c2-ee4f6e07fb68-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.671816 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.691790 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.711967 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 07 
07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.731163 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.731291 4875 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.731401 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca podName:de07f99e-f06a-4800-96f5-ecfebf9630f2 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.23136216 +0000 UTC m=+141.191132743 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca") pod "controller-manager-879f6c89f-gk4f8" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.736320 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e36610d-9f23-4c7e-9980-fad03a7392d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.743178 4875 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.743668 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error podName:88313132-e652-4c64-b607-5b806c93e153 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.243625946 +0000 UTC m=+141.203396519 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-hlnjg" (UID: "88313132-e652-4c64-b607-5b806c93e153") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.746054 4875 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.746164 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles podName:de07f99e-f06a-4800-96f5-ecfebf9630f2 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.246137952 +0000 UTC m=+141.205908645 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles") pod "controller-manager-879f6c89f-gk4f8" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.751134 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.753030 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e36610d-9f23-4c7e-9980-fad03a7392d9-config\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.770078 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.790237 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.816466 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.827316 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b3dea818-360c-47a0-8b92-48cb0ab39cb4-metrics-tls\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.838369 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841619 4875 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841746 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca podName:83061376-ca6f-4ccc-8da0-bede4a497b4f nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.341711996 +0000 UTC m=+141.301482539 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca") pod "marketplace-operator-79b997595-j9tjw" (UID: "83061376-ca6f-4ccc-8da0-bede4a497b4f") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841767 4875 secret.go:188] Couldn't get secret openshift-ingress/router-stats-default: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841852 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth podName:9e48afb0-e9d1-4d51-a992-2de00ba12ec6 nodeName:}" failed. 
No retries permitted until 2025-10-07 07:58:36.341830139 +0000 UTC m=+141.301600692 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "stats-auth" (UniqueName: "kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth") pod "router-default-5444994796-vpcnf" (UID: "9e48afb0-e9d1-4d51-a992-2de00ba12ec6") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841865 4875 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841942 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert podName:703e43ba-ed43-4ffc-92e3-0063568dcefc nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.341927392 +0000 UTC m=+141.301697945 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert") pod "olm-operator-6b444d44fb-nrcsn" (UID: "703e43ba-ed43-4ffc-92e3-0063568dcefc") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841943 4875 secret.go:188] Couldn't get secret openshift-kube-apiserver-operator/kube-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.841950 4875 configmap.go:193] Couldn't get configMap openshift-kube-apiserver-operator/kube-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842004 4875 configmap.go:193] Couldn't get configMap openshift-ingress/service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842039 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert podName:f363016c-d18e-4d16-96e3-35b871d6f130 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.341997994 +0000 UTC m=+141.301768557 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert") pod "kube-apiserver-operator-766d6c64bb-bnk9v" (UID: "f363016c-d18e-4d16-96e3-35b871d6f130") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842035 4875 secret.go:188] Couldn't get secret openshift-ingress/router-certs-default: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842065 4875 secret.go:188] Couldn't get secret openshift-ingress/router-metrics-certs-default: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842045 4875 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842068 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle podName:9e48afb0-e9d1-4d51-a992-2de00ba12ec6 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.342054356 +0000 UTC m=+141.301824909 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "service-ca-bundle" (UniqueName: "kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle") pod "router-default-5444994796-vpcnf" (UID: "9e48afb0-e9d1-4d51-a992-2de00ba12ec6") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842145 4875 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842168 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config podName:f363016c-d18e-4d16-96e3-35b871d6f130 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.342155979 +0000 UTC m=+141.301926522 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config") pod "kube-apiserver-operator-766d6c64bb-bnk9v" (UID: "f363016c-d18e-4d16-96e3-35b871d6f130") : failed to sync configmap cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842187 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs podName:9e48afb0-e9d1-4d51-a992-2de00ba12ec6 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.34217798 +0000 UTC m=+141.301948523 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs") pod "router-default-5444994796-vpcnf" (UID: "9e48afb0-e9d1-4d51-a992-2de00ba12ec6") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842206 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate podName:9e48afb0-e9d1-4d51-a992-2de00ba12ec6 nodeName:}" failed. 
No retries permitted until 2025-10-07 07:58:36.342196191 +0000 UTC m=+141.301966734 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-certificate" (UniqueName: "kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate") pod "router-default-5444994796-vpcnf" (UID: "9e48afb0-e9d1-4d51-a992-2de00ba12ec6") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842235 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert podName:0ee20e7b-f9fe-4717-bad6-16ca5936e100 nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.342229162 +0000 UTC m=+141.301999705 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert") pod "ingress-canary-rffld" (UID: "0ee20e7b-f9fe-4717-bad6-16ca5936e100") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: E1007 07:58:35.842248 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics podName:83061376-ca6f-4ccc-8da0-bede4a497b4f nodeName:}" failed. No retries permitted until 2025-10-07 07:58:36.342242242 +0000 UTC m=+141.302012785 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics") pod "marketplace-operator-79b997595-j9tjw" (UID: "83061376-ca6f-4ccc-8da0-bede4a497b4f") : failed to sync secret cache: timed out waiting for the condition Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.843642 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3dea818-360c-47a0-8b92-48cb0ab39cb4-trusted-ca\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.850899 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.870492 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.891312 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.928476 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.930020 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.951049 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 07:58:35.971702 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 07 07:58:35 crc kubenswrapper[4875]: I1007 
07:58:35.989984 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.011165 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.030967 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.050211 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.070512 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.090963 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.110979 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.130084 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.151239 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.170948 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.190665 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.210189 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.232209 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.250913 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.266244 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.266649 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.266924 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.270265 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.291289 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.310507 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.330229 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.350505 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.368214 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.368713 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.368912 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.369036 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.369117 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.369212 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert\") pod \"ingress-canary-rffld\" 
(UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.369362 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.370128 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.370313 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f363016c-d18e-4d16-96e3-35b871d6f130-config\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.370508 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.370600 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.370964 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.371067 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-service-ca-bundle\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.372568 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/703e43ba-ed43-4ffc-92e3-0063568dcefc-srv-cert\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.373300 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.373652 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-default-certificate\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.373770 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ee20e7b-f9fe-4717-bad6-16ca5936e100-cert\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.373957 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-metrics-certs\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.374419 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.375260 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f363016c-d18e-4d16-96e3-35b871d6f130-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.375424 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-stats-auth\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.410367 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.430235 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.450078 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.469862 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.490604 4875 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.510761 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 07 07:58:36 crc kubenswrapper[4875]: 
I1007 07:58:36.546089 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxwxx\" (UniqueName: \"kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx\") pod \"console-f9d7485db-wccw7\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.566601 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkh8d\" (UniqueName: \"kubernetes.io/projected/97431ef0-70e0-4c93-9ebc-ba3c9823685f-kube-api-access-lkh8d\") pod \"downloads-7954f5f757-4fthd\" (UID: \"97431ef0-70e0-4c93-9ebc-ba3c9823685f\") " pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.587133 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.607573 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvf2h\" (UniqueName: \"kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h\") pod \"route-controller-manager-6576b87f9c-cf7b2\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.608375 4875 request.go:700] Waited for 1.875857006s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-cluster-machine-approver/serviceaccounts/machine-approver-sa/token Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.631518 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxqkw\" (UniqueName: \"kubernetes.io/projected/ace64483-508c-4ffc-9519-0186712eea32-kube-api-access-zxqkw\") pod \"machine-approver-56656f9798-q5g8q\" (UID: \"ace64483-508c-4ffc-9519-0186712eea32\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.647635 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkzx6\" (UniqueName: \"kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.650642 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.668553 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq7bb\" (UniqueName: \"kubernetes.io/projected/550c8dbf-53c3-44c9-87c8-d7bc275f384e-kube-api-access-sq7bb\") pod \"cluster-samples-operator-665b6dd947-72b9h\" (UID: \"550c8dbf-53c3-44c9-87c8-d7bc275f384e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.689343 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv9n5\" (UniqueName: \"kubernetes.io/projected/4e8107af-7b4e-44e0-88f3-47fa08be03fa-kube-api-access-wv9n5\") pod \"machine-config-operator-74547568cd-p2bx4\" (UID: \"4e8107af-7b4e-44e0-88f3-47fa08be03fa\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.702999 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.707532 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk4gk\" (UniqueName: \"kubernetes.io/projected/ebf98430-6317-40a9-be26-84a817cfbf1e-kube-api-access-kk4gk\") pod \"console-operator-58897d9998-rtps2\" (UID: \"ebf98430-6317-40a9-be26-84a817cfbf1e\") " pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.711846 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.725180 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.728497 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-846sr\" (UniqueName: \"kubernetes.io/projected/51cad0ca-3fc8-486e-a7e9-05c470121cb8-kube-api-access-846sr\") pod \"authentication-operator-69f744f599-gjnmj\" (UID: \"51cad0ca-3fc8-486e-a7e9-05c470121cb8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.731401 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.741352 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.747778 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq67v\" (UniqueName: \"kubernetes.io/projected/8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b-kube-api-access-rq67v\") pod \"apiserver-7bbb656c7d-zdjs2\" (UID: \"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.748775 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.768653 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zsbf\" (UniqueName: \"kubernetes.io/projected/665d7a28-86ba-498f-a7b4-60d5c7e8ef8c-kube-api-access-7zsbf\") pod \"multus-admission-controller-857f4d67dd-pxkvz\" (UID: \"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.776865 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.784170 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.789363 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlw97\" (UniqueName: \"kubernetes.io/projected/5679969e-90bf-49f0-b478-7312b6e13a05-kube-api-access-nlw97\") pod \"machine-api-operator-5694c8668f-zpc28\" (UID: \"5679969e-90bf-49f0-b478-7312b6e13a05\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.803756 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.812619 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ngbq\" (UniqueName: \"kubernetes.io/projected/ad43d1f1-e9d0-400f-87fc-d397aebd5473-kube-api-access-5ngbq\") pod \"cluster-image-registry-operator-dc59b4c8b-6svk5\" (UID: \"ad43d1f1-e9d0-400f-87fc-d397aebd5473\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.826035 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgsng\" (UniqueName: \"kubernetes.io/projected/ddc50d0d-840b-41e8-821f-d4e97916051a-kube-api-access-hgsng\") pod \"package-server-manager-789f6589d5-57m8s\" (UID: \"ddc50d0d-840b-41e8-821f-d4e97916051a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.862276 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg8wx\" (UniqueName: \"kubernetes.io/projected/0d7070c6-e24b-4b05-b574-92b15066833c-kube-api-access-tg8wx\") pod \"catalog-operator-68c6474976-8x4x6\" (UID: \"0d7070c6-e24b-4b05-b574-92b15066833c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.871608 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.877502 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6qq9\" (UniqueName: \"kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.893200 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/67bb7921-f940-46a4-80c8-aa1aeae3b33b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lqcfl\" (UID: \"67bb7921-f940-46a4-80c8-aa1aeae3b33b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.903250 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gblh2\" (UniqueName: \"kubernetes.io/projected/e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b-kube-api-access-gblh2\") pod \"openshift-apiserver-operator-796bbdcf4f-wsp6s\" (UID: \"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.927480 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpdw7\" (UniqueName: \"kubernetes.io/projected/d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4-kube-api-access-lpdw7\") pod \"kube-storage-version-migrator-operator-b67b599dd-mgh7p\" (UID: \"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.955138 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlqcw\" (UniqueName: \"kubernetes.io/projected/e612704a-aea6-48f6-82c1-cee4e0e77859-kube-api-access-tlqcw\") pod \"apiserver-76f77b778f-7clsb\" (UID: \"e612704a-aea6-48f6-82c1-cee4e0e77859\") " pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.968270 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.982979 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" Oct 07 07:58:36 crc kubenswrapper[4875]: I1007 07:58:36.992088 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt7wr\" (UniqueName: \"kubernetes.io/projected/b405a105-6441-41b4-90c2-ee4f6e07fb68-kube-api-access-kt7wr\") pod \"openshift-controller-manager-operator-756b6f6bc6-hs8xs\" (UID: \"b405a105-6441-41b4-90c2-ee4f6e07fb68\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.005916 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9prd\" (UniqueName: \"kubernetes.io/projected/0ee20e7b-f9fe-4717-bad6-16ca5936e100-kube-api-access-c9prd\") pod \"ingress-canary-rffld\" (UID: \"0ee20e7b-f9fe-4717-bad6-16ca5936e100\") " pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.025021 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6j9v\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-kube-api-access-m6j9v\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.046607 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rffld" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.053705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2pvh\" (UniqueName: \"kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh\") pod \"marketplace-operator-79b997595-j9tjw\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.066459 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.068500 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.073222 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn2tf\" (UniqueName: \"kubernetes.io/projected/703e43ba-ed43-4ffc-92e3-0063568dcefc-kube-api-access-nn2tf\") pod \"olm-operator-6b444d44fb-nrcsn\" (UID: \"703e43ba-ed43-4ffc-92e3-0063568dcefc\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.085072 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9mfz\" (UniqueName: \"kubernetes.io/projected/9e48afb0-e9d1-4d51-a992-2de00ba12ec6-kube-api-access-q9mfz\") pod \"router-default-5444994796-vpcnf\" (UID: \"9e48afb0-e9d1-4d51-a992-2de00ba12ec6\") " pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.093398 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.108191 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f363016c-d18e-4d16-96e3-35b871d6f130-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bnk9v\" (UID: \"f363016c-d18e-4d16-96e3-35b871d6f130\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.115190 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.144707 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3dea818-360c-47a0-8b92-48cb0ab39cb4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5mtmp\" (UID: \"b3dea818-360c-47a0-8b92-48cb0ab39cb4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.181274 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.185000 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh86g\" (UniqueName: \"kubernetes.io/projected/eb7d772c-6ae9-4793-8727-653f80c2d8a0-kube-api-access-vh86g\") pod \"machine-config-controller-84d6567774-649xl\" (UID: \"eb7d772c-6ae9-4793-8727-653f80c2d8a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.188207 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.193756 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e36610d-9f23-4c7e-9980-fad03a7392d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-97glv\" (UID: \"2e36610d-9f23-4c7e-9980-fad03a7392d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.204370 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.209591 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gk4f8\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.210861 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.223244 4875 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.225952 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.232308 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.241465 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hlnjg\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.273252 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.281366 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284804 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-key\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284831 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284853 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f36de34-531b-4910-b322-3e3a2d0bf7f5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284912 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284944 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42zwp\" (UniqueName: \"kubernetes.io/projected/863cddd5-0467-4ab9-b258-4ee6642bbe0c-kube-api-access-42zwp\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.284978 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqxxx\" (UniqueName: \"kubernetes.io/projected/aca7accd-6755-4e1c-be1c-2879b913e87c-kube-api-access-qqxxx\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285028 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-service-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285043 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zcvc\" (UniqueName: \"kubernetes.io/projected/0ecce496-7852-48a4-869d-9d2e3e01cf3d-kube-api-access-6zcvc\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285077 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwpgp\" (UniqueName: \"kubernetes.io/projected/905136e9-422f-465a-95df-4a51cfbbc172-kube-api-access-rwpgp\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285096 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-serving-cert\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285125 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e032dc50-8da1-4b34-981a-ec2b162cace7-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285142 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f36de34-531b-4910-b322-3e3a2d0bf7f5-serving-cert\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285159 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e90baa3-eb1c-4077-9451-a927f4c2303b-metrics-tls\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285188 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm4j6\" (UniqueName: \"kubernetes.io/projected/d52a533b-1e88-42e4-b9a6-e1af57eeff28-kube-api-access-lm4j6\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285234 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4xms\" (UniqueName: \"kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285261 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d52a533b-1e88-42e4-b9a6-e1af57eeff28-tmpfs\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285284 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285316 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-node-bootstrap-token\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285348 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285362 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-config\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285376 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-webhook-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 
07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285459 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285476 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863cddd5-0467-4ab9-b258-4ee6642bbe0c-config\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285527 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgmrp\" (UniqueName: \"kubernetes.io/projected/7f36de34-531b-4910-b322-3e3a2d0bf7f5-kube-api-access-vgmrp\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285562 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285580 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc298\" (UniqueName: \"kubernetes.io/projected/e032dc50-8da1-4b34-981a-ec2b162cace7-kube-api-access-rc298\") pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285620 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285664 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk65d\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285680 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-apiservice-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc 
kubenswrapper[4875]: I1007 07:58:37.285706 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285720 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-cabundle\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285742 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285771 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863cddd5-0467-4ab9-b258-4ee6642bbe0c-serving-cert\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285790 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-certs\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285805 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8vh4\" (UniqueName: \"kubernetes.io/projected/3e90baa3-eb1c-4077-9451-a927f4c2303b-kube-api-access-w8vh4\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285834 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-etcd-client\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.285856 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bclkm\" (UniqueName: \"kubernetes.io/projected/50f94d0f-37cf-4a2e-9241-3394cbaefd14-kube-api-access-bclkm\") pod \"migrator-59844c95c7-6gfc9\" (UID: \"50f94d0f-37cf-4a2e-9241-3394cbaefd14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.287993 4875 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:37.787973108 +0000 UTC m=+142.747743651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.288074 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.289664 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.306429 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.314026 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.321802 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.329460 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.405150 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjnmj"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409100 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409370 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4xms\" (UniqueName: \"kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409425 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409450 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d52a533b-1e88-42e4-b9a6-e1af57eeff28-tmpfs\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409483 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-plugins-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.409534 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:37.909500827 +0000 UTC m=+142.869271530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409629 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-node-bootstrap-token\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409695 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409741 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-config\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409763 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-webhook-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409789 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8fe11f26-eb43-48e5-a2be-487a193ca3d3-metrics-tls\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409842 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409864 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863cddd5-0467-4ab9-b258-4ee6642bbe0c-config\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409931 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgmrp\" (UniqueName: 
\"kubernetes.io/projected/7f36de34-531b-4910-b322-3e3a2d0bf7f5-kube-api-access-vgmrp\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.409962 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slnw2\" (UniqueName: \"kubernetes.io/projected/8fe11f26-eb43-48e5-a2be-487a193ca3d3-kube-api-access-slnw2\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410002 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-mountpoint-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410030 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410074 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc298\" (UniqueName: \"kubernetes.io/projected/e032dc50-8da1-4b34-981a-ec2b162cace7-kube-api-access-rc298\") pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410102 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-socket-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410161 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-registration-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410200 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410259 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk65d\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410283 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-apiservice-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410331 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410372 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-cabundle\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410423 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410462 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410474 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863cddd5-0467-4ab9-b258-4ee6642bbe0c-serving-cert\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410530 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-certs\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410592 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8vh4\" (UniqueName: \"kubernetes.io/projected/3e90baa3-eb1c-4077-9451-a927f4c2303b-kube-api-access-w8vh4\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410660 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-etcd-client\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.410805 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d52a533b-1e88-42e4-b9a6-e1af57eeff28-tmpfs\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414019 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bclkm\" (UniqueName: \"kubernetes.io/projected/50f94d0f-37cf-4a2e-9241-3394cbaefd14-kube-api-access-bclkm\") pod \"migrator-59844c95c7-6gfc9\" (UID: \"50f94d0f-37cf-4a2e-9241-3394cbaefd14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414149 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414172 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414181 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-key\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414228 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414277 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f36de34-531b-4910-b322-3e3a2d0bf7f5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414335 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: 
I1007 07:58:37.414379 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42zwp\" (UniqueName: \"kubernetes.io/projected/863cddd5-0467-4ab9-b258-4ee6642bbe0c-kube-api-access-42zwp\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414430 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqxxx\" (UniqueName: \"kubernetes.io/projected/aca7accd-6755-4e1c-be1c-2879b913e87c-kube-api-access-qqxxx\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414539 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-service-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414564 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zcvc\" (UniqueName: \"kubernetes.io/projected/0ecce496-7852-48a4-869d-9d2e3e01cf3d-kube-api-access-6zcvc\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414606 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwpgp\" (UniqueName: \"kubernetes.io/projected/905136e9-422f-465a-95df-4a51cfbbc172-kube-api-access-rwpgp\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414674 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-csi-data-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414695 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-serving-cert\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414731 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8fe11f26-eb43-48e5-a2be-487a193ca3d3-config-volume\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414778 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e032dc50-8da1-4b34-981a-ec2b162cace7-control-plane-machine-set-operator-tls\") 
pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414798 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f36de34-531b-4910-b322-3e3a2d0bf7f5-serving-cert\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414820 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e90baa3-eb1c-4077-9451-a927f4c2303b-metrics-tls\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414891 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbrdz\" (UniqueName: \"kubernetes.io/projected/41dcc994-8e79-4282-8389-abc86eee67b0-kube-api-access-bbrdz\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.414922 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm4j6\" (UniqueName: \"kubernetes.io/projected/d52a533b-1e88-42e4-b9a6-e1af57eeff28-kube-api-access-lm4j6\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.415645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863cddd5-0467-4ab9-b258-4ee6642bbe0c-config\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.415915 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f36de34-531b-4910-b322-3e3a2d0bf7f5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.416192 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-cabundle\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.419912 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc 
kubenswrapper[4875]: I1007 07:58:37.421135 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-config\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.421751 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-service-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.421766 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863cddd5-0467-4ab9-b258-4ee6642bbe0c-serving-cert\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.422667 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.423179 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-apiservice-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.423708 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-serving-cert\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.424694 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.425396 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.426252 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-certs\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.427149 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/905136e9-422f-465a-95df-4a51cfbbc172-etcd-ca\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.435458 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3e90baa3-eb1c-4077-9451-a927f4c2303b-metrics-tls\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.435740 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.436260 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rtps2"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.437243 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/905136e9-422f-465a-95df-4a51cfbbc172-etcd-client\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.443932 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0ecce496-7852-48a4-869d-9d2e3e01cf3d-signing-key\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.448358 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/aca7accd-6755-4e1c-be1c-2879b913e87c-node-bootstrap-token\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.453027 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:37.953001183 +0000 UTC m=+142.912771726 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.458788 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d52a533b-1e88-42e4-b9a6-e1af57eeff28-webhook-cert\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.459176 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f36de34-531b-4910-b322-3e3a2d0bf7f5-serving-cert\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.459641 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.461051 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vpcnf" event={"ID":"9e48afb0-e9d1-4d51-a992-2de00ba12ec6","Type":"ContainerStarted","Data":"41b20bfce6090bf8379a642249a5bf714f2d6d27803e06fb456a519af3f093d7"} Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.462833 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.473761 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e032dc50-8da1-4b34-981a-ec2b162cace7-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.473756 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" event={"ID":"ace64483-508c-4ffc-9519-0186712eea32","Type":"ContainerStarted","Data":"519dc8319daaedca0754ad5cc14fa0e4b33188beabd616e9a4a93a5bd727d704"} Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.473840 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" event={"ID":"ace64483-508c-4ffc-9519-0186712eea32","Type":"ContainerStarted","Data":"9ae21b6678714941743da378857af3711ac0186d41f2e4ed447c90ac334bd1d3"} Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.494217 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.512763 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4xms\" (UniqueName: \"kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms\") pod \"collect-profiles-29330385-lc8x5\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.516828 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.517955 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slnw2\" (UniqueName: \"kubernetes.io/projected/8fe11f26-eb43-48e5-a2be-487a193ca3d3-kube-api-access-slnw2\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518004 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-mountpoint-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518034 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-socket-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518083 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-registration-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518333 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-csi-data-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518454 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-mountpoint-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.518508 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/8fe11f26-eb43-48e5-a2be-487a193ca3d3-config-volume\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.519720 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgmrp\" (UniqueName: \"kubernetes.io/projected/7f36de34-531b-4910-b322-3e3a2d0bf7f5-kube-api-access-vgmrp\") pod \"openshift-config-operator-7777fb866f-wtjgx\" (UID: \"7f36de34-531b-4910-b322-3e3a2d0bf7f5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.529185 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.534522 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h"] Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.543141 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.043117317 +0000 UTC m=+143.002887860 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.543447 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-registration-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.543551 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-socket-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.544238 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.544565 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-csi-data-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.544860 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbrdz\" (UniqueName: \"kubernetes.io/projected/41dcc994-8e79-4282-8389-abc86eee67b0-kube-api-access-bbrdz\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.545125 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-plugins-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.545203 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8fe11f26-eb43-48e5-a2be-487a193ca3d3-metrics-tls\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.545303 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/41dcc994-8e79-4282-8389-abc86eee67b0-plugins-dir\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.545953 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8fe11f26-eb43-48e5-a2be-487a193ca3d3-config-volume\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.547109 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk65d\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.548460 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8fe11f26-eb43-48e5-a2be-487a193ca3d3-metrics-tls\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.567199 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc298\" (UniqueName: \"kubernetes.io/projected/e032dc50-8da1-4b34-981a-ec2b162cace7-kube-api-access-rc298\") pod \"control-plane-machine-set-operator-78cbb6b69f-wrx6d\" (UID: \"e032dc50-8da1-4b34-981a-ec2b162cace7\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.577390 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwpgp\" (UniqueName: \"kubernetes.io/projected/905136e9-422f-465a-95df-4a51cfbbc172-kube-api-access-rwpgp\") pod \"etcd-operator-b45778765-cr4tx\" (UID: \"905136e9-422f-465a-95df-4a51cfbbc172\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.588318 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.590402 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.599268 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.599302 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4fthd"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.600950 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42zwp\" (UniqueName: \"kubernetes.io/projected/863cddd5-0467-4ab9-b258-4ee6642bbe0c-kube-api-access-42zwp\") pod \"service-ca-operator-777779d784-wpqln\" (UID: \"863cddd5-0467-4ab9-b258-4ee6642bbe0c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.620010 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bclkm\" (UniqueName: \"kubernetes.io/projected/50f94d0f-37cf-4a2e-9241-3394cbaefd14-kube-api-access-bclkm\") pod \"migrator-59844c95c7-6gfc9\" (UID: \"50f94d0f-37cf-4a2e-9241-3394cbaefd14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" Oct 07 07:58:37 crc kubenswrapper[4875]: W1007 07:58:37.628494 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8aaee5e1_e8fd_430c_bc6b_7ac25c49f95b.slice/crio-3d73eaf95ea35fd5bb1efc6b4f4137fbc95b6975e1b346c34f5b19756cd082fc WatchSource:0}: Error finding container 3d73eaf95ea35fd5bb1efc6b4f4137fbc95b6975e1b346c34f5b19756cd082fc: Status 404 returned error can't find the container with id 3d73eaf95ea35fd5bb1efc6b4f4137fbc95b6975e1b346c34f5b19756cd082fc Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.636350 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8vh4\" (UniqueName: \"kubernetes.io/projected/3e90baa3-eb1c-4077-9451-a927f4c2303b-kube-api-access-w8vh4\") pod \"dns-operator-744455d44c-8r27m\" (UID: \"3e90baa3-eb1c-4077-9451-a927f4c2303b\") " pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.647703 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 
07:58:37.648663 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.148643786 +0000 UTC m=+143.108414339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.666358 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zcvc\" (UniqueName: \"kubernetes.io/projected/0ecce496-7852-48a4-869d-9d2e3e01cf3d-kube-api-access-6zcvc\") pod \"service-ca-9c57cc56f-67qrc\" (UID: \"0ecce496-7852-48a4-869d-9d2e3e01cf3d\") " pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.676782 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm4j6\" (UniqueName: \"kubernetes.io/projected/d52a533b-1e88-42e4-b9a6-e1af57eeff28-kube-api-access-lm4j6\") pod \"packageserver-d55dfcdfc-qtl66\" (UID: \"d52a533b-1e88-42e4-b9a6-e1af57eeff28\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.694452 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqxxx\" (UniqueName: \"kubernetes.io/projected/aca7accd-6755-4e1c-be1c-2879b913e87c-kube-api-access-qqxxx\") pod \"machine-config-server-cx82k\" (UID: \"aca7accd-6755-4e1c-be1c-2879b913e87c\") " pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.714293 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.728961 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.729429 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.748907 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.749243 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.249228013 +0000 UTC m=+143.208998556 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.755848 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slnw2\" (UniqueName: \"kubernetes.io/projected/8fe11f26-eb43-48e5-a2be-487a193ca3d3-kube-api-access-slnw2\") pod \"dns-default-q7gd8\" (UID: \"8fe11f26-eb43-48e5-a2be-487a193ca3d3\") " pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.757378 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pxkvz"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.766425 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zpc28"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.767577 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbrdz\" (UniqueName: \"kubernetes.io/projected/41dcc994-8e79-4282-8389-abc86eee67b0-kube-api-access-bbrdz\") pod \"csi-hostpathplugin-5khj4\" (UID: \"41dcc994-8e79-4282-8389-abc86eee67b0\") " pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.767690 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rffld"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.772136 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.783337 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.802586 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.809824 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.830615 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.836911 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.846113 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.850779 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.851800 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.35177746 +0000 UTC m=+143.311548003 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.866630 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" Oct 07 07:58:37 crc kubenswrapper[4875]: W1007 07:58:37.915370 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ee20e7b_f9fe_4717_bad6_16ca5936e100.slice/crio-b4fbf26e27498682f15de150350c52ef35acb6023cda5824e933e733fb516bfd WatchSource:0}: Error finding container b4fbf26e27498682f15de150350c52ef35acb6023cda5824e933e733fb516bfd: Status 404 returned error can't find the container with id b4fbf26e27498682f15de150350c52ef35acb6023cda5824e933e733fb516bfd Oct 07 07:58:37 crc kubenswrapper[4875]: W1007 07:58:37.916449 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode89a2d49_34f1_4e24_8b62_5bd8bfe39b8b.slice/crio-49731928d4d737e9f405b10f3ba1fe3d7f9c900fadafc5326febba0d7f020538 WatchSource:0}: Error finding container 49731928d4d737e9f405b10f3ba1fe3d7f9c900fadafc5326febba0d7f020538: Status 404 returned error can't find the container with id 49731928d4d737e9f405b10f3ba1fe3d7f9c900fadafc5326febba0d7f020538 Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.936179 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cx82k" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.954973 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.955530 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.959632 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.459612539 +0000 UTC m=+143.419383082 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.960680 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:37 crc kubenswrapper[4875]: E1007 07:58:37.961067 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.461030862 +0000 UTC m=+143.420801405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.970137 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.972770 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.984605 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6"] Oct 07 07:58:37 crc kubenswrapper[4875]: I1007 07:58:37.997350 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.007939 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.062081 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.062409 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.562343932 +0000 UTC m=+143.522114475 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.062649 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.063520 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.563506287 +0000 UTC m=+143.523276830 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.168288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.172727 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.672697828 +0000 UTC m=+143.632468371 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.186718 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7clsb"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.268167 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.269992 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.272576 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.273286 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.273612 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.773600574 +0000 UTC m=+143.733371117 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.277777 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.298797 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.300990 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.384258 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.384527 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.884504378 +0000 UTC m=+143.844274931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.384793 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.385665 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.885651813 +0000 UTC m=+143.845422356 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.455511 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9"] Oct 07 07:58:38 crc kubenswrapper[4875]: W1007 07:58:38.465609 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3dea818_360c_47a0_8b92_48cb0ab39cb4.slice/crio-14bbfae386090ca7aff7949609a236cabb0691e036e70f132ece51f625871dca WatchSource:0}: Error finding container 14bbfae386090ca7aff7949609a236cabb0691e036e70f132ece51f625871dca: Status 404 returned error can't find the container with id 14bbfae386090ca7aff7949609a236cabb0691e036e70f132ece51f625871dca Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.477150 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.487993 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.488382 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.488722 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:38.988705616 +0000 UTC m=+143.948476159 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.490481 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" event={"ID":"67bb7921-f940-46a4-80c8-aa1aeae3b33b","Type":"ContainerStarted","Data":"f0e173bd75b0f9593c7f138c4281a57d616f62610b0f5f34f100c0ce51a60f84"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.492727 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-649xl"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.492798 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rtps2" event={"ID":"ebf98430-6317-40a9-be26-84a817cfbf1e","Type":"ContainerStarted","Data":"04d5620e59f727f7ddf0984b18675ff6cbb3844e1905325a5a79df0ec3f1ccbd"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.492834 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rtps2" event={"ID":"ebf98430-6317-40a9-be26-84a817cfbf1e","Type":"ContainerStarted","Data":"bb37a136fee6549f30c5b39e7d4527a90fedfd148941ac7df9a92836c3ac42a5"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.493724 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.506556 4875 patch_prober.go:28] interesting pod/console-operator-58897d9998-rtps2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.506652 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-rtps2" podUID="ebf98430-6317-40a9-be26-84a817cfbf1e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.513330 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wccw7" event={"ID":"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482","Type":"ContainerStarted","Data":"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.513410 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wccw7" event={"ID":"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482","Type":"ContainerStarted","Data":"1b525f2ebbd53475b6527e53b3c0ca49a8bbeed6900600ef4998550d316b0e5a"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.551520 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" 
event={"ID":"ad43d1f1-e9d0-400f-87fc-d397aebd5473","Type":"ContainerStarted","Data":"464891966d0b78188804450c751f09a109052241828487f12a0b26f3fb1cc345"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.556568 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" event={"ID":"51cad0ca-3fc8-486e-a7e9-05c470121cb8","Type":"ContainerStarted","Data":"9e8e7c601d14f0c60207bffbaed5156705e5105d2996ba4cf2e9d6660d8919c8"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.556649 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" event={"ID":"51cad0ca-3fc8-486e-a7e9-05c470121cb8","Type":"ContainerStarted","Data":"1e3b176da764028f41acca1d47cf98f649b5679a4ed5d0b9e6565099414e4a1d"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.566041 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" event={"ID":"0d7070c6-e24b-4b05-b574-92b15066833c","Type":"ContainerStarted","Data":"6747bb407302d3a9fef58813b2739cadc90b1c0e3505638fe53658e5b5b76cb1"} Oct 07 07:58:38 crc kubenswrapper[4875]: W1007 07:58:38.566286 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50f94d0f_37cf_4a2e_9241_3394cbaefd14.slice/crio-78a724149fabec031f38465330a8e8d105425bcb7aa65c954d927dd0cef56d04 WatchSource:0}: Error finding container 78a724149fabec031f38465330a8e8d105425bcb7aa65c954d927dd0cef56d04: Status 404 returned error can't find the container with id 78a724149fabec031f38465330a8e8d105425bcb7aa65c954d927dd0cef56d04 Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.569442 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" event={"ID":"703e43ba-ed43-4ffc-92e3-0063568dcefc","Type":"ContainerStarted","Data":"91e6def8d1e41400e9b7027652e7adeb9596b2c00144e86dc53f7bc295ac2428"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.576819 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" event={"ID":"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b","Type":"ContainerDied","Data":"f8a63e12c99e3127fa9d85267bf4e3839ab6f9ff863392fddd314d732e997704"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.578610 4875 generic.go:334] "Generic (PLEG): container finished" podID="8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b" containerID="f8a63e12c99e3127fa9d85267bf4e3839ab6f9ff863392fddd314d732e997704" exitCode=0 Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.578752 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" event={"ID":"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b","Type":"ContainerStarted","Data":"3d73eaf95ea35fd5bb1efc6b4f4137fbc95b6975e1b346c34f5b19756cd082fc"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.585241 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" event={"ID":"e612704a-aea6-48f6-82c1-cee4e0e77859","Type":"ContainerStarted","Data":"120c7279034d2691ac30bda4a258e1654933761ea2ec172d8824efe123b383c3"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.589758 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.590094 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.090081936 +0000 UTC m=+144.049852469 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.604354 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" event={"ID":"ddc50d0d-840b-41e8-821f-d4e97916051a","Type":"ContainerStarted","Data":"bdf0632dc0e05a6f7d8d6fdbde0d00b1ce10233c362c4ad594ddefb9400ee453"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.621819 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" event={"ID":"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4","Type":"ContainerStarted","Data":"7acc3b9ec740a1733a6582c73123a0e841466f80b53aa521145639d66ab676fe"} Oct 07 07:58:38 crc kubenswrapper[4875]: W1007 07:58:38.626304 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2da55ba3_88e7_4cda_8ad9_b5945e39e991.slice/crio-1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50 WatchSource:0}: Error finding container 1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50: Status 404 returned error can't find the container with id 1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50 Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.633749 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" event={"ID":"550c8dbf-53c3-44c9-87c8-d7bc275f384e","Type":"ContainerStarted","Data":"3580aaa462922b20b3de4964c56df6266fd9a09a8ecdcc2f585219ace1b060ac"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.634215 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" event={"ID":"550c8dbf-53c3-44c9-87c8-d7bc275f384e","Type":"ContainerStarted","Data":"5c8c7801cefd256d5aca474b708126a822d389544d9b2f495699e4ffe6e8db56"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.646607 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" event={"ID":"ace64483-508c-4ffc-9519-0186712eea32","Type":"ContainerStarted","Data":"90f342b6ff711ffe52b5b7862d20c5d777173370f8a878fe963bbcdab15f58c3"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.649761 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" event={"ID":"b3dea818-360c-47a0-8b92-48cb0ab39cb4","Type":"ContainerStarted","Data":"14bbfae386090ca7aff7949609a236cabb0691e036e70f132ece51f625871dca"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.654571 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" event={"ID":"83061376-ca6f-4ccc-8da0-bede4a497b4f","Type":"ContainerStarted","Data":"f5ee1f28e21c4fb7581f67be934a729d904b0d956e1269307a084fce4ee66b4c"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.656510 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" event={"ID":"2e36610d-9f23-4c7e-9980-fad03a7392d9","Type":"ContainerStarted","Data":"6aeda9fe37224991db20e5ce7e8b7fb5eea9fc6dffa22c08938cb7ef7f3c89b8"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.657799 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" event={"ID":"de07f99e-f06a-4800-96f5-ecfebf9630f2","Type":"ContainerStarted","Data":"b1c30cbccdb858fd5ebb00b9f84f657ecec36474f5f4d36a1d7a0906b4b6b936"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.662420 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rffld" event={"ID":"0ee20e7b-f9fe-4717-bad6-16ca5936e100","Type":"ContainerStarted","Data":"b4fbf26e27498682f15de150350c52ef35acb6023cda5824e933e733fb516bfd"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.666937 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" event={"ID":"b405a105-6441-41b4-90c2-ee4f6e07fb68","Type":"ContainerStarted","Data":"2181e6dce9f1e269ecbd557b9362e3140b05839d66ee689c879ef409e090635b"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.670214 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" event={"ID":"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b","Type":"ContainerStarted","Data":"49731928d4d737e9f405b10f3ba1fe3d7f9c900fadafc5326febba0d7f020538"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.674173 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" event={"ID":"f363016c-d18e-4d16-96e3-35b871d6f130","Type":"ContainerStarted","Data":"84ed5fe596c4b8b547d90e9234ddf1ee95c2b22f3a4d36a0d0792849b167ecbc"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.692836 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.694599 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" event={"ID":"c26c9450-7ff8-4142-b007-01a00adbb28d","Type":"ContainerStarted","Data":"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.694637 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" event={"ID":"c26c9450-7ff8-4142-b007-01a00adbb28d","Type":"ContainerStarted","Data":"19ef1c4a0bfdc1399aefe9822e38dc9d3c7f0a394f854dfb62cd6df04800d582"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.719129 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.722460 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.222396537 +0000 UTC m=+144.182167080 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.722870 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.736150 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cr4tx"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.747155 4875 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-cf7b2 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.773869 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.273841846 +0000 UTC m=+144.233612389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.778290 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" event={"ID":"4e8107af-7b4e-44e0-88f3-47fa08be03fa","Type":"ContainerStarted","Data":"aad4d1a9c263834daafd1c801c786357ce83dd0d10e0b8145e49b8fc1aa82777"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.778333 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" event={"ID":"4e8107af-7b4e-44e0-88f3-47fa08be03fa","Type":"ContainerStarted","Data":"6a3593b267cd3b54d7361126887a3f7a0b543b7afeedc6f40ff39d2061dcba4e"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.747276 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.782669 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.782750 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.785408 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.793007 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q7gd8"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.795692 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-67qrc"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.817790 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4fthd" event={"ID":"97431ef0-70e0-4c93-9ebc-ba3c9823685f","Type":"ContainerStarted","Data":"7e19ad02c3e41f764eb366bf214ce0e31f0895c0b20827dbc9a18c1e1cc097db"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.817908 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4fthd" event={"ID":"97431ef0-70e0-4c93-9ebc-ba3c9823685f","Type":"ContainerStarted","Data":"141b25bb399946d5c023339e1468158d56ec6e0a782324347c64cf158ac957ad"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.819471 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.821417 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5khj4"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.828457 4875 
patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.828522 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.829066 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cx82k" event={"ID":"aca7accd-6755-4e1c-be1c-2879b913e87c","Type":"ContainerStarted","Data":"20169babf0857b7f96cc49d73551f0bd7b5cb491743813310a7544b1a8693f6f"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.835032 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vpcnf" event={"ID":"9e48afb0-e9d1-4d51-a992-2de00ba12ec6","Type":"ContainerStarted","Data":"52a56661433634fc04957f55a6c5ba45db70d250fd8f18956651db93b489a562"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.836702 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.839003 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.338960694 +0000 UTC m=+144.298731387 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.841044 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" event={"ID":"5679969e-90bf-49f0-b478-7312b6e13a05","Type":"ContainerStarted","Data":"1287ca4e4f87b1bbdce5a2396594dfd7f97c6629ff689201537facd7ec3727c1"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.846670 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" event={"ID":"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c","Type":"ContainerStarted","Data":"2e3f396244267a65d55330bb8871d20f25f38d383e5157ddee886e27fa037f5b"} Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.884329 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wpqln"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.885533 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8r27m"] Oct 07 07:58:38 crc kubenswrapper[4875]: I1007 07:58:38.940138 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:38 crc kubenswrapper[4875]: E1007 07:58:38.941380 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.441365327 +0000 UTC m=+144.401135960 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:38 crc kubenswrapper[4875]: W1007 07:58:38.979520 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod863cddd5_0467_4ab9_b258_4ee6642bbe0c.slice/crio-4be595dbd7f02352f6e16d8bfbaf9cdbf27259ccf31b76212358f43b61192a42 WatchSource:0}: Error finding container 4be595dbd7f02352f6e16d8bfbaf9cdbf27259ccf31b76212358f43b61192a42: Status 404 returned error can't find the container with id 4be595dbd7f02352f6e16d8bfbaf9cdbf27259ccf31b76212358f43b61192a42 Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.042940 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.043184 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.543153121 +0000 UTC m=+144.502923684 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.043630 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.044108 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.544096229 +0000 UTC m=+144.503866832 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.144540 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.144697 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.644670235 +0000 UTC m=+144.604440778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.144809 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.145184 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.645171661 +0000 UTC m=+144.604942284 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.206283 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjnmj" podStartSLOduration=123.206261446 podStartE2EDuration="2m3.206261446s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.204967346 +0000 UTC m=+144.164737899" watchObservedRunningTime="2025-10-07 07:58:39.206261446 +0000 UTC m=+144.166031989" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.246238 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.247258 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.747244124 +0000 UTC m=+144.707014667 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.324979 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.329639 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:39 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:39 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:39 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.329700 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.337567 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-vpcnf" podStartSLOduration=123.337542064 podStartE2EDuration="2m3.337542064s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.330853849 +0000 UTC m=+144.290624392" watchObservedRunningTime="2025-10-07 07:58:39.337542064 +0000 UTC m=+144.297312617" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.348445 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.349379 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.849355037 +0000 UTC m=+144.809125640 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.449842 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.450127 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.950103388 +0000 UTC m=+144.909873941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.450583 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.450907 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:39.950890673 +0000 UTC m=+144.910661206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.551080 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.551437 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.051423818 +0000 UTC m=+145.011194361 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.567911 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q5g8q" podStartSLOduration=123.567868682 podStartE2EDuration="2m3.567868682s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.530797355 +0000 UTC m=+144.490567898" watchObservedRunningTime="2025-10-07 07:58:39.567868682 +0000 UTC m=+144.527639225" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.613103 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wccw7" podStartSLOduration=123.61308392 podStartE2EDuration="2m3.61308392s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.611561474 +0000 UTC m=+144.571332037" watchObservedRunningTime="2025-10-07 07:58:39.61308392 +0000 UTC m=+144.572854463" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.654188 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.654843 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.154825591 +0000 UTC m=+145.114596134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.744277 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-rtps2" podStartSLOduration=123.744253065 podStartE2EDuration="2m3.744253065s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.68835548 +0000 UTC m=+144.648126023" watchObservedRunningTime="2025-10-07 07:58:39.744253065 +0000 UTC m=+144.704023608" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.760488 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.760737 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.26068934 +0000 UTC m=+145.220459883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.760850 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.761363 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.26134705 +0000 UTC m=+145.221117583 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.870948 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.871889 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.371859602 +0000 UTC m=+145.331630135 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.899237 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4fthd" podStartSLOduration=123.899207591 podStartE2EDuration="2m3.899207591s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:39.898960353 +0000 UTC m=+144.858730916" watchObservedRunningTime="2025-10-07 07:58:39.899207591 +0000 UTC m=+144.858978134" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.912348 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" event={"ID":"83061376-ca6f-4ccc-8da0-bede4a497b4f","Type":"ContainerStarted","Data":"c752782e23754b4b0e11f4e7c85183900c60495b30945ab2879cc0541f704f29"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.926870 4875 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-j9tjw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.927202 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.926984 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.935081 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" event={"ID":"e032dc50-8da1-4b34-981a-ec2b162cace7","Type":"ContainerStarted","Data":"117d97e2880bfa9d2eb4437cdb072514dff4c55f776fb187f151523a069c4916"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.935142 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" event={"ID":"e032dc50-8da1-4b34-981a-ec2b162cace7","Type":"ContainerStarted","Data":"4206626aac333b3a14381dd19c4036fdbb73ed39be8602d7bbc156bf70a2d834"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.948076 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" event={"ID":"67bb7921-f940-46a4-80c8-aa1aeae3b33b","Type":"ContainerStarted","Data":"e1e64a0bed67923f0e75d1206c2514052b343fd833584fc1b107bd9249fb20e7"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.955320 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" event={"ID":"0ecce496-7852-48a4-869d-9d2e3e01cf3d","Type":"ContainerStarted","Data":"c200af3a53e864b5b193793424abf1804ced5caedd68326ec023d56b7f83f3ba"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.955343 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" event={"ID":"0ecce496-7852-48a4-869d-9d2e3e01cf3d","Type":"ContainerStarted","Data":"383bb873a20c668a142be37d0f5bd00e4f0b1809665183881eff3dad10b0b6a6"} Oct 07 07:58:39 crc kubenswrapper[4875]: I1007 07:58:39.974207 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:39 crc kubenswrapper[4875]: E1007 07:58:39.974500 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.474485551 +0000 UTC m=+145.434256094 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.022353 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" podStartSLOduration=123.022328619 podStartE2EDuration="2m3.022328619s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.019980217 +0000 UTC m=+144.979750750" watchObservedRunningTime="2025-10-07 07:58:40.022328619 +0000 UTC m=+144.982099162" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.026450 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" event={"ID":"4e8107af-7b4e-44e0-88f3-47fa08be03fa","Type":"ContainerStarted","Data":"111205c9679d7dcb1ec64c3aa4a0931ef2eeefc9d5d12371da7ee1f799bef8f9"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.037299 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" event={"ID":"de07f99e-f06a-4800-96f5-ecfebf9630f2","Type":"ContainerStarted","Data":"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.038672 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.040427 4875 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gk4f8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.040499 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.044733 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cx82k" event={"ID":"aca7accd-6755-4e1c-be1c-2879b913e87c","Type":"ContainerStarted","Data":"307d6cc023f73f01b1831981255c57470a0f39a78b126f0a2de5927bb87b94ed"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.048481 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" event={"ID":"88313132-e652-4c64-b607-5b806c93e153","Type":"ContainerStarted","Data":"c024e8a40266b2286c733e08a9fa62bbec22ce7621d979a4bd68074081ed0352"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.052911 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" event={"ID":"550c8dbf-53c3-44c9-87c8-d7bc275f384e","Type":"ContainerStarted","Data":"45a98e0580d863b23f0d53e74b22fa247f818732a4ab24a871b097f711582f1b"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.070259 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" event={"ID":"b3dea818-360c-47a0-8b92-48cb0ab39cb4","Type":"ContainerStarted","Data":"65c5bb4b4db64f8162b6b79320a6cf2c3497f8c079f16266e63fbb41c131f563"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.076661 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.076841 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.576814101 +0000 UTC m=+145.536584634 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.076909 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.077394 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.577369068 +0000 UTC m=+145.537139611 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.092827 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" event={"ID":"d5c0d7ea-c293-4a7d-ba73-3fceeb8b6ea4","Type":"ContainerStarted","Data":"99aa145d0eba97b7e20a3871800d1b4f78fff10d0c6d38deed932efa0cc84cee"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.103569 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lqcfl" podStartSLOduration=123.103554092 podStartE2EDuration="2m3.103554092s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.070162937 +0000 UTC m=+145.029933500" watchObservedRunningTime="2025-10-07 07:58:40.103554092 +0000 UTC m=+145.063324635" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.124701 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" event={"ID":"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c","Type":"ContainerStarted","Data":"4db4ba2cae6916cc687b3c2fd77577b05ebb72c71dc8194bf2f8d1aa81039cc6"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.145789 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" event={"ID":"5679969e-90bf-49f0-b478-7312b6e13a05","Type":"ContainerStarted","Data":"fb7351c73041da3faf7e59f74bfa5c5c5621e5a1f4c1febfcf36a0461c8d19a8"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.146243 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" event={"ID":"5679969e-90bf-49f0-b478-7312b6e13a05","Type":"ContainerStarted","Data":"352d1133a5beb3473ec502efc9e8cbec5df7faed710dbcdf5a6b0cc4215120e4"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.150173 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" event={"ID":"d52a533b-1e88-42e4-b9a6-e1af57eeff28","Type":"ContainerStarted","Data":"e79eb216642d8493a4004b2642547f2507b2b18ef1fe705bcf57e4626ce0df49"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.150266 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" event={"ID":"d52a533b-1e88-42e4-b9a6-e1af57eeff28","Type":"ContainerStarted","Data":"e23133fd075dbc440362d06b1ad8c97cea5b0bf25e4679cb80475ba71651b8e3"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.153376 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.165736 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" 
event={"ID":"7f36de34-531b-4910-b322-3e3a2d0bf7f5","Type":"ContainerStarted","Data":"3ff0d3214801533169250e32a7ee6db74ed63475a5ac8c59768a1c25bfd3fd1b"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.165785 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" event={"ID":"7f36de34-531b-4910-b322-3e3a2d0bf7f5","Type":"ContainerStarted","Data":"7afd1b55645bfab4d2f3ebd31955d29288f7a479756855dfd4f52bb90c9e2e61"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.166669 4875 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qtl66 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.166839 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" podUID="d52a533b-1e88-42e4-b9a6-e1af57eeff28" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.174283 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" event={"ID":"e89a2d49-34f1-4e24-8b62-5bd8bfe39b8b","Type":"ContainerStarted","Data":"23f9c6f1f2af8da9f6a18cbe8d5efc026795e301d6e6e5727607e3452f99e552"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.176182 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wrx6d" podStartSLOduration=123.17617026 podStartE2EDuration="2m3.17617026s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.108563715 +0000 UTC m=+145.068334258" watchObservedRunningTime="2025-10-07 07:58:40.17617026 +0000 UTC m=+145.135940803" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.178726 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" podStartSLOduration=123.178710368 podStartE2EDuration="2m3.178710368s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.15469173 +0000 UTC m=+145.114462293" watchObservedRunningTime="2025-10-07 07:58:40.178710368 +0000 UTC m=+145.138480911" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.182667 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.182815 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-07 07:58:40.682788023 +0000 UTC m=+145.642558566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.183346 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.187060 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.687043124 +0000 UTC m=+145.646813667 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.192975 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-67qrc" podStartSLOduration=123.192928274 podStartE2EDuration="2m3.192928274s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.18790503 +0000 UTC m=+145.147675573" watchObservedRunningTime="2025-10-07 07:58:40.192928274 +0000 UTC m=+145.152698827" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.194135 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" event={"ID":"3e90baa3-eb1c-4077-9451-a927f4c2303b","Type":"ContainerStarted","Data":"a56d8a79783c346d56b710aea7f9a085cea44a392ee95ba5e519f17d24bc1483"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.225535 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wsp6s" podStartSLOduration=124.225512084 podStartE2EDuration="2m4.225512084s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.221103289 +0000 UTC m=+145.180873832" watchObservedRunningTime="2025-10-07 07:58:40.225512084 +0000 UTC m=+145.185282617" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.248870 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" 
event={"ID":"ddc50d0d-840b-41e8-821f-d4e97916051a","Type":"ContainerStarted","Data":"eaa0e69fc93c94489771069a85550ca40db61effb0a03fc9dbf2a9854ddc3006"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.248992 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" event={"ID":"ddc50d0d-840b-41e8-821f-d4e97916051a","Type":"ContainerStarted","Data":"b5e7d42e3e132ee75994899f11fb66a2a9ca3539eaabfdb9655c3304b0033f22"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.249849 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.263581 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rffld" event={"ID":"0ee20e7b-f9fe-4717-bad6-16ca5936e100","Type":"ContainerStarted","Data":"5df60a50da375b13ecbd709b77774c64a52c9244abcd1ba3fbc2088f9bfd08e5"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.277668 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" event={"ID":"41dcc994-8e79-4282-8389-abc86eee67b0","Type":"ContainerStarted","Data":"26ed2e6c243608ef4d983e34b82adc27ac83017b2af7076fed1a2928f197753e"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.288038 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.289213 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.789186058 +0000 UTC m=+145.748956641 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.294401 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7gd8" event={"ID":"8fe11f26-eb43-48e5-a2be-487a193ca3d3","Type":"ContainerStarted","Data":"ed5ccb03d59a40711419f9399052e38e8daafa724f047a560637b0c318bef976"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.300586 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" event={"ID":"f363016c-d18e-4d16-96e3-35b871d6f130","Type":"ContainerStarted","Data":"f655f5a43b671e1b9423040120072c307beb982f67182c9955c5b35e700b3ab8"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.311732 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" event={"ID":"eb7d772c-6ae9-4793-8727-653f80c2d8a0","Type":"ContainerStarted","Data":"d843ee051c81beb0fd37f166aca4070e35f016f70fa7fcdd672349473c814e5b"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.311792 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" event={"ID":"eb7d772c-6ae9-4793-8727-653f80c2d8a0","Type":"ContainerStarted","Data":"44ec3c5369b4745c9b187470acf692da2e184043f987a71b405614b82c2b6607"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.325563 4875 generic.go:334] "Generic (PLEG): container finished" podID="e612704a-aea6-48f6-82c1-cee4e0e77859" containerID="605f88fca94c4abeaeb91f59ba5511e49082ad3cac3be9bdf727da31e5f10f80" exitCode=0 Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.325669 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" event={"ID":"e612704a-aea6-48f6-82c1-cee4e0e77859","Type":"ContainerDied","Data":"605f88fca94c4abeaeb91f59ba5511e49082ad3cac3be9bdf727da31e5f10f80"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.326324 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mgh7p" podStartSLOduration=123.326297907 podStartE2EDuration="2m3.326297907s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.323444329 +0000 UTC m=+145.283214872" watchObservedRunningTime="2025-10-07 07:58:40.326297907 +0000 UTC m=+145.286068450" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.327096 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:40 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:40 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:40 crc kubenswrapper[4875]: healthz check failed Oct 07 
07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.327370 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.334640 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" event={"ID":"50f94d0f-37cf-4a2e-9241-3394cbaefd14","Type":"ContainerStarted","Data":"864356f533cb21aac95319b4adec7d68568cd552aaf9030b578b20a82dccbf51"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.334697 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" event={"ID":"50f94d0f-37cf-4a2e-9241-3394cbaefd14","Type":"ContainerStarted","Data":"78a724149fabec031f38465330a8e8d105425bcb7aa65c954d927dd0cef56d04"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.345792 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" event={"ID":"2da55ba3-88e7-4cda-8ad9-b5945e39e991","Type":"ContainerStarted","Data":"bbdb2f9ebc9cb3dde8e0fb046436b4bacec8657642ffd439302fcc99b2579d3e"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.345862 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" event={"ID":"2da55ba3-88e7-4cda-8ad9-b5945e39e991","Type":"ContainerStarted","Data":"1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.355374 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" event={"ID":"ad43d1f1-e9d0-400f-87fc-d397aebd5473","Type":"ContainerStarted","Data":"a4d91decfae9ec83c98749b1a1d884ad3d39d83a55abd9a996995ea524cde342"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.384738 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" event={"ID":"863cddd5-0467-4ab9-b258-4ee6642bbe0c","Type":"ContainerStarted","Data":"4be595dbd7f02352f6e16d8bfbaf9cdbf27259ccf31b76212358f43b61192a42"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.389479 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.390458 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.890419474 +0000 UTC m=+145.850190017 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.405018 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" podStartSLOduration=124.404983772 podStartE2EDuration="2m4.404983772s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.374177046 +0000 UTC m=+145.333947599" watchObservedRunningTime="2025-10-07 07:58:40.404983772 +0000 UTC m=+145.364754315" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.425100 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" event={"ID":"905136e9-422f-465a-95df-4a51cfbbc172","Type":"ContainerStarted","Data":"e9f909c02c40c2c1938ac58e7757624e59a6becf40f38e418cd077df14bcf68b"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.426832 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2bx4" podStartSLOduration=123.426804151 podStartE2EDuration="2m3.426804151s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.403408154 +0000 UTC m=+145.363178717" watchObservedRunningTime="2025-10-07 07:58:40.426804151 +0000 UTC m=+145.386574694" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.432120 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cx82k" podStartSLOduration=6.432100633 podStartE2EDuration="6.432100633s" podCreationTimestamp="2025-10-07 07:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.426397428 +0000 UTC m=+145.386167971" watchObservedRunningTime="2025-10-07 07:58:40.432100633 +0000 UTC m=+145.391871176" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.436125 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" event={"ID":"b405a105-6441-41b4-90c2-ee4f6e07fb68","Type":"ContainerStarted","Data":"832e8a29770ad9d45b3d3ee9a1a4b0833de30e13d215d172c27259a5da801e72"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.439468 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" event={"ID":"703e43ba-ed43-4ffc-92e3-0063568dcefc","Type":"ContainerStarted","Data":"cb77ba0c63718d5f4a080450d49d866e4b6aa4e70a6931a48705289da8aef749"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.440409 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 
07:58:40.443102 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" event={"ID":"0d7070c6-e24b-4b05-b574-92b15066833c","Type":"ContainerStarted","Data":"994f9860b8ac7c82b1beb546eb61581d34591e651b462396b9b1881b02eb722d"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.443613 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.447200 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" event={"ID":"2e36610d-9f23-4c7e-9980-fad03a7392d9","Type":"ContainerStarted","Data":"94e4cc2380408d524aec49aacf68299245e14275ec6093142b5495d16b052b0d"} Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.448970 4875 patch_prober.go:28] interesting pod/console-operator-58897d9998-rtps2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.449045 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-rtps2" podUID="ebf98430-6317-40a9-be26-84a817cfbf1e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.450263 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.450300 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.450383 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zpc28" podStartSLOduration=123.450366234 podStartE2EDuration="2m3.450366234s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.449825658 +0000 UTC m=+145.409596231" watchObservedRunningTime="2025-10-07 07:58:40.450366234 +0000 UTC m=+145.410136777" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.461591 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.472218 4875 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-8x4x6 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 
07:58:40.472274 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" podUID="0d7070c6-e24b-4b05-b574-92b15066833c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.472338 4875 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-nrcsn container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.472400 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" podUID="703e43ba-ed43-4ffc-92e3-0063568dcefc" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.492062 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.493828 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:40.993813597 +0000 UTC m=+145.953584140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.566352 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" podStartSLOduration=124.566319922 podStartE2EDuration="2m4.566319922s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.541542082 +0000 UTC m=+145.501312645" watchObservedRunningTime="2025-10-07 07:58:40.566319922 +0000 UTC m=+145.526090485" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.567831 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-72b9h" podStartSLOduration=124.567824859 podStartE2EDuration="2m4.567824859s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.491200327 +0000 UTC m=+145.450970880" watchObservedRunningTime="2025-10-07 07:58:40.567824859 +0000 UTC m=+145.527595402" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.600261 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.600995 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" podStartSLOduration=123.600969256 podStartE2EDuration="2m3.600969256s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.599321926 +0000 UTC m=+145.559092469" watchObservedRunningTime="2025-10-07 07:58:40.600969256 +0000 UTC m=+145.560739799" Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.603750 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.103733411 +0000 UTC m=+146.063503954 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.665799 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" podStartSLOduration=123.665777915 podStartE2EDuration="2m3.665777915s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.661397261 +0000 UTC m=+145.621167804" watchObservedRunningTime="2025-10-07 07:58:40.665777915 +0000 UTC m=+145.625548458" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.687338 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" podStartSLOduration=123.687316265 podStartE2EDuration="2m3.687316265s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.685865491 +0000 UTC m=+145.645636034" watchObservedRunningTime="2025-10-07 07:58:40.687316265 +0000 UTC m=+145.647086808" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.701857 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.702465 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.20244238 +0000 UTC m=+146.162212923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.776945 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bnk9v" podStartSLOduration=124.776919125 podStartE2EDuration="2m4.776919125s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.724131475 +0000 UTC m=+145.683902038" watchObservedRunningTime="2025-10-07 07:58:40.776919125 +0000 UTC m=+145.736689668" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.806624 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.807111 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.307099291 +0000 UTC m=+146.266869834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.816287 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" podStartSLOduration=123.816260132 podStartE2EDuration="2m3.816260132s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.812340393 +0000 UTC m=+145.772110956" watchObservedRunningTime="2025-10-07 07:58:40.816260132 +0000 UTC m=+145.776030675" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.889115 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" podStartSLOduration=123.889092258 podStartE2EDuration="2m3.889092258s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.888250461 +0000 UTC m=+145.848021024" watchObservedRunningTime="2025-10-07 07:58:40.889092258 +0000 UTC m=+145.848862801" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.890857 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6svk5" podStartSLOduration=124.890847422 podStartE2EDuration="2m4.890847422s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.850118772 +0000 UTC m=+145.809889325" watchObservedRunningTime="2025-10-07 07:58:40.890847422 +0000 UTC m=+145.850617965" Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.908242 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:40 crc kubenswrapper[4875]: E1007 07:58:40.908751 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.4087322 +0000 UTC m=+146.368502743 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:40 crc kubenswrapper[4875]: I1007 07:58:40.976668 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-97glv" podStartSLOduration=124.976648455 podStartE2EDuration="2m4.976648455s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.946211431 +0000 UTC m=+145.905981964" watchObservedRunningTime="2025-10-07 07:58:40.976648455 +0000 UTC m=+145.936418998" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.011607 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.012030 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.51201622 +0000 UTC m=+146.471786763 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.012804 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" podStartSLOduration=124.012794594 podStartE2EDuration="2m4.012794594s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:40.976407787 +0000 UTC m=+145.936178340" watchObservedRunningTime="2025-10-07 07:58:41.012794594 +0000 UTC m=+145.972565137" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.048338 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" podStartSLOduration=124.048320744 podStartE2EDuration="2m4.048320744s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.047331384 +0000 UTC m=+146.007101937" watchObservedRunningTime="2025-10-07 07:58:41.048320744 +0000 UTC m=+146.008091297" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.050303 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-rffld" podStartSLOduration=7.050291875 podStartE2EDuration="7.050291875s" podCreationTimestamp="2025-10-07 07:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.019352895 +0000 UTC m=+145.979123438" watchObservedRunningTime="2025-10-07 07:58:41.050291875 +0000 UTC m=+146.010062418" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.104480 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.112427 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.112609 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.612586956 +0000 UTC m=+146.572357499 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.112781 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.113199 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.613192865 +0000 UTC m=+146.572963408 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.128907 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hs8xs" podStartSLOduration=125.128863406 podStartE2EDuration="2m5.128863406s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.088068444 +0000 UTC m=+146.047838987" watchObservedRunningTime="2025-10-07 07:58:41.128863406 +0000 UTC m=+146.088633949" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.130393 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" podStartSLOduration=125.130384492 podStartE2EDuration="2m5.130384492s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.123943164 +0000 UTC m=+146.083713727" watchObservedRunningTime="2025-10-07 07:58:41.130384492 +0000 UTC m=+146.090155035" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.213555 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.214358 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.714336949 +0000 UTC m=+146.674107492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.239273 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" podStartSLOduration=125.239257833 podStartE2EDuration="2m5.239257833s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.17819643 +0000 UTC m=+146.137966973" watchObservedRunningTime="2025-10-07 07:58:41.239257833 +0000 UTC m=+146.199028366" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.315292 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.315577 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.815560115 +0000 UTC m=+146.775330658 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.322869 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:41 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:41 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:41 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.322937 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.416268 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.416432 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.91640236 +0000 UTC m=+146.876172903 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.416943 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.417279 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:41.917265997 +0000 UTC m=+146.877036540 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.454817 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wpqln" event={"ID":"863cddd5-0467-4ab9-b258-4ee6642bbe0c","Type":"ContainerStarted","Data":"20f665ee70942c7a8f13027057e6bb58f84cc4875c2849328307c41ef942d40f"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.457497 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" event={"ID":"665d7a28-86ba-498f-a7b4-60d5c7e8ef8c","Type":"ContainerStarted","Data":"172f2c3098a9b009c31f675bb82c6413f6b0efb5eb40bfb1318f53f568fb2302"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.459942 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" event={"ID":"88313132-e652-4c64-b607-5b806c93e153","Type":"ContainerStarted","Data":"495ab7dbc39dfacf2e885b92ae0f1104e2fad2c188a1261e5f4e700504f53d8a"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.460870 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.462744 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" event={"ID":"8aaee5e1-e8fd-430c-bc6b-7ac25c49f95b","Type":"ContainerStarted","Data":"64c019656e1664305b3cec90fdb3e7493005b02e29d41deef7f6b7b85338477b"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.463732 4875 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hlnjg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.463799 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.483408 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" event={"ID":"e612704a-aea6-48f6-82c1-cee4e0e77859","Type":"ContainerStarted","Data":"5581c40e23ee29168e204d2e5af8508c7e620fe8ec41264f8006aeb5c5b51387"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.490476 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" event={"ID":"e612704a-aea6-48f6-82c1-cee4e0e77859","Type":"ContainerStarted","Data":"0325254a9df74c5c58a06b4645aeadd2aafb68abfdaf5c464adda3893f5cbe35"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.518737 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.519202 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.019186474 +0000 UTC m=+146.978957017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.522279 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-pxkvz" podStartSLOduration=124.522265248 podStartE2EDuration="2m4.522265248s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.519492123 +0000 UTC m=+146.479262686" watchObservedRunningTime="2025-10-07 07:58:41.522265248 +0000 UTC m=+146.482035791" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.522939 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6gfc9" event={"ID":"50f94d0f-37cf-4a2e-9241-3394cbaefd14","Type":"ContainerStarted","Data":"93dbd2db716e95093c21cf603f2c98ade33121bb41a30a1598a897c6df8ce14a"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.566511 4875 generic.go:334] "Generic (PLEG): container finished" podID="7f36de34-531b-4910-b322-3e3a2d0bf7f5" containerID="3ff0d3214801533169250e32a7ee6db74ed63475a5ac8c59768a1c25bfd3fd1b" exitCode=0 Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.566683 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" event={"ID":"7f36de34-531b-4910-b322-3e3a2d0bf7f5","Type":"ContainerDied","Data":"3ff0d3214801533169250e32a7ee6db74ed63475a5ac8c59768a1c25bfd3fd1b"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.594402 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7gd8" event={"ID":"8fe11f26-eb43-48e5-a2be-487a193ca3d3","Type":"ContainerStarted","Data":"9127d1c7787f2188e13573ffe39714937e145a289982af0653e610be43ba71de"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.594815 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7gd8" event={"ID":"8fe11f26-eb43-48e5-a2be-487a193ca3d3","Type":"ContainerStarted","Data":"1e2cd686e8aef0e45d039c7b6a3c578d418f6e7e712dd904c773f0bd56b2c9cb"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.595481 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.600625 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" podStartSLOduration=125.600613613 podStartE2EDuration="2m5.600613613s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.597983532 +0000 UTC m=+146.557754085" watchObservedRunningTime="2025-10-07 07:58:41.600613613 +0000 UTC m=+146.560384156" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.611304 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cr4tx" event={"ID":"905136e9-422f-465a-95df-4a51cfbbc172","Type":"ContainerStarted","Data":"bd56690fcddc1da656ae560b9b8a943d67676ca85cbb4ad22e5ee2b334f8facf"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.622982 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.625325 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.12531158 +0000 UTC m=+147.085082123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.626633 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" event={"ID":"eb7d772c-6ae9-4793-8727-653f80c2d8a0","Type":"ContainerStarted","Data":"817245f80ee558d18652fe24f3e9371235abc138336d153723751793425c9b88"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.640750 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" podStartSLOduration=125.640721554 podStartE2EDuration="2m5.640721554s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.638290629 +0000 UTC m=+146.598061172" watchObservedRunningTime="2025-10-07 07:58:41.640721554 +0000 UTC m=+146.600492097" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.661346 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" event={"ID":"41dcc994-8e79-4282-8389-abc86eee67b0","Type":"ContainerStarted","Data":"fa0b9e5a39421ca60d364cdda91e080c222cc31d6596063fc60eca7318211ce4"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.665943 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5mtmp" 
event={"ID":"b3dea818-360c-47a0-8b92-48cb0ab39cb4","Type":"ContainerStarted","Data":"fa6f313eceb46eb6455c2a2b5c7a32c04cf45096b185eba00efcfdac3fbb6ed5"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.669804 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" event={"ID":"3e90baa3-eb1c-4077-9451-a927f4c2303b","Type":"ContainerStarted","Data":"38079c0963e4b25a0436755a55101c53fdd14036611e722f40b3077465d8e401"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.669844 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" event={"ID":"3e90baa3-eb1c-4077-9451-a927f4c2303b","Type":"ContainerStarted","Data":"28bf57d05f522c2a836fccefd699f6bd18dc6c551709866706fa11cf5126ccfa"} Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.674408 4875 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-j9tjw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.674522 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.675342 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.675396 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.676079 4875 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gk4f8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.676133 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.696369 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8x4x6" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.724170 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.725520 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.225503765 +0000 UTC m=+147.185274308 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.738642 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-649xl" podStartSLOduration=124.738626108 podStartE2EDuration="2m4.738626108s" podCreationTimestamp="2025-10-07 07:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.686009634 +0000 UTC m=+146.645780187" watchObservedRunningTime="2025-10-07 07:58:41.738626108 +0000 UTC m=+146.698396651" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.779732 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.779955 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.808228 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrcsn" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.819205 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-q7gd8" podStartSLOduration=7.81919009 podStartE2EDuration="7.81919009s" podCreationTimestamp="2025-10-07 07:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.785671552 +0000 UTC m=+146.745442095" watchObservedRunningTime="2025-10-07 07:58:41.81919009 +0000 UTC m=+146.778960633" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.827367 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.829644 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.32962504 +0000 UTC m=+147.289395783 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.863519 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-rtps2" Oct 07 07:58:41 crc kubenswrapper[4875]: I1007 07:58:41.929748 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:41 crc kubenswrapper[4875]: E1007 07:58:41.930114 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.430098533 +0000 UTC m=+147.389869076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.046920 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8r27m" podStartSLOduration=126.046895808 podStartE2EDuration="2m6.046895808s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:41.871869237 +0000 UTC m=+146.831639780" watchObservedRunningTime="2025-10-07 07:58:42.046895808 +0000 UTC m=+147.006666351" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.048903 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.049312 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.549296672 +0000 UTC m=+147.509067205 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.151343 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.151788 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.651772666 +0000 UTC m=+147.611543209 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.227024 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.227475 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.231064 4875 patch_prober.go:28] interesting pod/apiserver-76f77b778f-7clsb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.231133 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" podUID="e612704a-aea6-48f6-82c1-cee4e0e77859" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.260573 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.260927 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 07:58:42.760913756 +0000 UTC m=+147.720684299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.325064 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:42 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:42 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:42 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.325118 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.362257 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.362498 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.862467432 +0000 UTC m=+147.822237985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.404441 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.406335 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.411297 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.432986 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.463913 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.464466 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:42.964442371 +0000 UTC m=+147.924212914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.561680 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.562814 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.564806 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.565133 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdh6h\" (UniqueName: \"kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.565187 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.565218 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.565314 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.065300896 +0000 UTC m=+148.025071439 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.568532 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.600835 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666441 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdh6h\" (UniqueName: \"kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666494 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666535 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666604 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666742 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.666824 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsp8f\" (UniqueName: 
\"kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.667309 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.167294746 +0000 UTC m=+148.127065289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.667733 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.667831 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.674949 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" event={"ID":"7f36de34-531b-4910-b322-3e3a2d0bf7f5","Type":"ContainerStarted","Data":"e510fe72f21aa227be186f96d66d0d79d411363945786b8e8acb8b3ff0f613dd"} Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.675955 4875 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qtl66 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.676005 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" podUID="d52a533b-1e88-42e4-b9a6-e1af57eeff28" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.692460 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.722381 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdh6h\" (UniqueName: \"kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h\") pod \"certified-operators-bqwr8\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " pod="openshift-marketplace/certified-operators-bqwr8" 
Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.725777 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" podStartSLOduration=126.725764841 podStartE2EDuration="2m6.725764841s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:42.706990265 +0000 UTC m=+147.666760808" watchObservedRunningTime="2025-10-07 07:58:42.725764841 +0000 UTC m=+147.685535384" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.728726 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.729132 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.730207 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.767673 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.769221 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.769704 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.269684168 +0000 UTC m=+148.229454711 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.770040 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.770716 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.770915 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsp8f\" (UniqueName: \"kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.771257 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.770494 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.779217 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.787670 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.28764847 +0000 UTC m=+148.247419013 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.832822 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsp8f\" (UniqueName: \"kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f\") pod \"community-operators-htjvv\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.874418 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.874840 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4tm8\" (UniqueName: \"kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.874899 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.875176 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.876245 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.376228918 +0000 UTC m=+148.335999461 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.913116 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-htjvv" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.954674 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.956348 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.961911 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.976635 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.976714 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4tm8\" (UniqueName: \"kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.976735 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.976809 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:42 crc kubenswrapper[4875]: E1007 07:58:42.977113 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.477097883 +0000 UTC m=+148.436868426 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.988923 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:42 crc kubenswrapper[4875]: I1007 07:58:42.988969 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.011524 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4tm8\" (UniqueName: \"kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8\") pod \"certified-operators-x6gdw\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.024158 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.045219 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.079739 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.080068 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2dgx\" (UniqueName: \"kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.080146 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.080191 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content\") pod \"community-operators-wjppx\" 
(UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.080326 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.58030385 +0000 UTC m=+148.540074393 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.104190 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.181105 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.181558 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.181611 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.181636 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2dgx\" (UniqueName: \"kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.182399 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.182719 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 
07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.182971 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.682961261 +0000 UTC m=+148.642731804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.202204 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qtl66" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.230979 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2dgx\" (UniqueName: \"kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx\") pod \"community-operators-wjppx\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.282733 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.283159 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.783141625 +0000 UTC m=+148.742912168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.317253 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wjppx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.334213 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:43 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:43 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:43 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.334329 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.385610 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.385942 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.885930669 +0000 UTC m=+148.845701212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.489631 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.490121 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:43.990102356 +0000 UTC m=+148.949872889 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.602231 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.602289 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.602320 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.602343 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.602375 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.602773 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.102755314 +0000 UTC m=+149.062525857 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.611415 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.620928 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.621068 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.625797 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.677224 4875 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hlnjg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.677296 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.709453 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.709931 4875 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.209914682 +0000 UTC m=+149.169685225 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.716193 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.723942 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" event={"ID":"41dcc994-8e79-4282-8389-abc86eee67b0","Type":"ContainerStarted","Data":"8169d8e91c3fd2e8123144ce87dd31d30dc35af1dccc6b2ee2f8094de2bc7ebb"} Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.724014 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.724031 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.724305 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.771205 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdjs2" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.812000 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.813270 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.822068 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.322050904 +0000 UTC m=+149.281821447 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:43 crc kubenswrapper[4875]: I1007 07:58:43.916322 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:43 crc kubenswrapper[4875]: E1007 07:58:43.916959 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.416941515 +0000 UTC m=+149.376712058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.021567 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.021915 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.521891366 +0000 UTC m=+149.481661909 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.123827 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.124440 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.624417332 +0000 UTC m=+149.584187865 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.226904 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.227257 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.727241818 +0000 UTC m=+149.687012361 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.319250 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:44 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:44 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:44 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.319352 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.328829 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.329053 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.82901786 +0000 UTC m=+149.788788403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.329118 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.329570 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.829561567 +0000 UTC m=+149.789332110 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.393424 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.432068 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.432556 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:44.932537367 +0000 UTC m=+149.892307910 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.460302 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.487927 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.531225 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.532601 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.536349 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.539018 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.540838 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 07:58:45.040820171 +0000 UTC m=+150.000590714 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: W1007 07:58:44.570656 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18dc718c_4aa1_460c_b3bf_62df2a1d277b.slice/crio-b172f4eaf74ad9bd9047ce5ca1b66e28cf3b88622d4d4d447dcd77770ccd340e WatchSource:0}: Error finding container b172f4eaf74ad9bd9047ce5ca1b66e28cf3b88622d4d4d447dcd77770ccd340e: Status 404 returned error can't find the container with id b172f4eaf74ad9bd9047ce5ca1b66e28cf3b88622d4d4d447dcd77770ccd340e Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.585679 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.613664 4875 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.649592 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.651548 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.651681 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-956mx\" (UniqueName: \"kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.652061 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:45.152016153 +0000 UTC m=+150.111786696 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.652305 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.652387 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.653143 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:45.153118027 +0000 UTC m=+150.112888570 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.754762 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.755005 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.755051 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.755084 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-956mx\" (UniqueName: 
\"kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.755145 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 07:58:45.255118326 +0000 UTC m=+150.214888869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.755671 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.755763 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.793566 4875 generic.go:334] "Generic (PLEG): container finished" podID="62c991b5-7e34-484b-8346-2db6f586c972" containerID="506685b334cc3689a22fb550d5fc22f85d86835cdb2b5a7475eee7b1df4cfb01" exitCode=0 Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.793682 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerDied","Data":"506685b334cc3689a22fb550d5fc22f85d86835cdb2b5a7475eee7b1df4cfb01"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.793708 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerStarted","Data":"9e4e9dfca91935a3002f398d89fa237ca85f46355d7aa924ec137f8e2001078b"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.797127 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-956mx\" (UniqueName: \"kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx\") pod \"redhat-marketplace-6kwkt\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.799659 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.806746 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" 
event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerStarted","Data":"b172f4eaf74ad9bd9047ce5ca1b66e28cf3b88622d4d4d447dcd77770ccd340e"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.823947 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerStarted","Data":"fa04d0b1c8e9234ef09b58e873de1de62354bd1aec0103150e54c3f35f2feb23"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.824000 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerStarted","Data":"32d5fb57de668f70403e0705524ecf92bbd33474b40e2e78c3c483e24a24a411"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.838179 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" event={"ID":"41dcc994-8e79-4282-8389-abc86eee67b0","Type":"ContainerStarted","Data":"37926cbc9f5b512a94288d734c0661eb74483cbef24a498d783830b97988522b"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.852787 4875 generic.go:334] "Generic (PLEG): container finished" podID="2da55ba3-88e7-4cda-8ad9-b5945e39e991" containerID="bbdb2f9ebc9cb3dde8e0fb046436b4bacec8657642ffd439302fcc99b2579d3e" exitCode=0 Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.852962 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" event={"ID":"2da55ba3-88e7-4cda-8ad9-b5945e39e991","Type":"ContainerDied","Data":"bbdb2f9ebc9cb3dde8e0fb046436b4bacec8657642ffd439302fcc99b2579d3e"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.856821 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerStarted","Data":"a2af927e01a51e2aa491a1f46157152c369daf0372cd852ee164d0171d15985b"} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.861036 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:44 crc kubenswrapper[4875]: E1007 07:58:44.861460 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 07:58:45.361443319 +0000 UTC m=+150.321213852 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b5q5p" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.884325 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.885171 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wtjgx" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.918278 4875 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-07T07:58:44.613702667Z","Handler":null,"Name":""} Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.932620 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.933934 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.933985 4875 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.934036 4875 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.962109 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.962763 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.986444 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.980890 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.988883 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.998029 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 07 07:58:44 crc kubenswrapper[4875]: I1007 07:58:44.998210 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.007328 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.098000 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.098480 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.098546 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt9n7\" (UniqueName: \"kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.098597 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.110504 4875 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.110560 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.200936 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.200989 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.201009 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.201052 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt9n7\" (UniqueName: \"kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.201113 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.201550 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.201757 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.212926 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b5q5p\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.238348 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt9n7\" (UniqueName: \"kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7\") pod \"redhat-marketplace-4z9kn\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.284250 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.307635 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.312854 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.313628 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.313706 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.322087 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:45 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:45 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:45 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.322197 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.327616 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: W1007 07:58:45.346449 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b26103_9634_4ff7_b3bd_d3819c51643f.slice/crio-3ded4d63236df673038ce0e74497fa1d29d5e4f4f51f410eb4a4505170972902 WatchSource:0}: Error finding container 3ded4d63236df673038ce0e74497fa1d29d5e4f4f51f410eb4a4505170972902: Status 404 returned error can't find the container with id 3ded4d63236df673038ce0e74497fa1d29d5e4f4f51f410eb4a4505170972902 Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.362434 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.440134 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.550978 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.552336 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.559459 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.568882 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.623511 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.624031 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2xfw\" (UniqueName: \"kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.624094 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.714198 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.726247 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " 
pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.726383 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.726428 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2xfw\" (UniqueName: \"kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.727730 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.727784 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.760386 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2xfw\" (UniqueName: \"kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw\") pod \"redhat-operators-mlrfc\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.815326 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.876501 4875 generic.go:334] "Generic (PLEG): container finished" podID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerID="11094cfd39db106746b212f9221c9fc0dc77231f532660c53aaa1dfc775616ce" exitCode=0 Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.876621 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerDied","Data":"11094cfd39db106746b212f9221c9fc0dc77231f532660c53aaa1dfc775616ce"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.898180 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" event={"ID":"2fcc08ca-5985-4132-a071-6821ef40cc5f","Type":"ContainerStarted","Data":"29f1384caa019917af64256eb8b174d2a9dc3cabdbb1eb9f3acf443be3b7c926"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.898859 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.901989 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6e3b08b0b95e9e97d07821a363cfee83787e055339265f044c6c14a416caf296"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.902286 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9774f7dab301510a8d8fb2891e7f0cf7d9d0248f8f5d38a8abb3190ac38a2fbd"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.902808 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.927908 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.928231 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerID="174491dcc2d08bdf4e4811579de940579248e98041de12c366f64c3b19d7b412" exitCode=0 Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.937673 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerDied","Data":"174491dcc2d08bdf4e4811579de940579248e98041de12c366f64c3b19d7b412"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.940409 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerStarted","Data":"3ded4d63236df673038ce0e74497fa1d29d5e4f4f51f410eb4a4505170972902"} Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.937934 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.971800 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 07:58:45 crc kubenswrapper[4875]: I1007 07:58:45.981543 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.001391 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"dd550039e899c8af8c9a16ee737a198c19847c784ae693dafb310620fe9e5186"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.001436 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0dab80ea4aa88948c3d77d0560c832587ecccb19e3e0f1a8e955a3f00a24c28e"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.012064 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8ea05e316a7073597b4ee91425517d8a07920fe8ed47bdc20c5da296b4b5b1aa"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.016970 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"02a84f91857a6793d308ccc7efb38ca05b177899f772f0b488c1e50b53777e86"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.030399 4875 generic.go:334] "Generic (PLEG): container finished" podID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerID="fa04d0b1c8e9234ef09b58e873de1de62354bd1aec0103150e54c3f35f2feb23" exitCode=0 Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.030564 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerDied","Data":"fa04d0b1c8e9234ef09b58e873de1de62354bd1aec0103150e54c3f35f2feb23"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.076158 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.123220 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" event={"ID":"41dcc994-8e79-4282-8389-abc86eee67b0","Type":"ContainerStarted","Data":"a765a028878705a46a11e13bddc852c1f4777a560b88e7a911ca7913dd0ebd6c"} Oct 07 07:58:46 crc kubenswrapper[4875]: W1007 07:58:46.123772 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod52f6b062_901f_4124_9509_bd2e495de62d.slice/crio-b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef WatchSource:0}: Error finding container b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef: Status 404 returned error can't find the container with id b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.133216 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.133282 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.133397 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz58f\" (UniqueName: \"kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.151253 4875 generic.go:334] "Generic (PLEG): container finished" podID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerID="d8583d7e078b9b81c3cc8e7b27a601effee4e42c1e14a0f3ff83e156dd4d8a51" exitCode=0 Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.152909 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerDied","Data":"d8583d7e078b9b81c3cc8e7b27a601effee4e42c1e14a0f3ff83e156dd4d8a51"} Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.218239 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5khj4" podStartSLOduration=12.218205176 podStartE2EDuration="12.218205176s" podCreationTimestamp="2025-10-07 07:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:46.148585038 +0000 UTC m=+151.108355601" watchObservedRunningTime="2025-10-07 07:58:46.218205176 +0000 UTC m=+151.177975719" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.239041 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz58f\" (UniqueName: \"kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.239495 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.239520 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.240690 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.241314 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.279767 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz58f\" (UniqueName: \"kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f\") pod \"redhat-operators-p58wr\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.322003 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:46 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:46 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:46 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.322258 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.335529 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.427437 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.648444 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.700815 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.741494 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.741789 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.747579 4875 patch_prober.go:28] interesting pod/console-f9d7485db-wccw7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.747637 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4xms\" (UniqueName: \"kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms\") pod \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.747657 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wccw7" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.747689 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume\") pod \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.747745 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume\") pod \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\" (UID: \"2da55ba3-88e7-4cda-8ad9-b5945e39e991\") " Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.758334 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms" (OuterVolumeSpecName: "kube-api-access-v4xms") pod "2da55ba3-88e7-4cda-8ad9-b5945e39e991" (UID: "2da55ba3-88e7-4cda-8ad9-b5945e39e991"). InnerVolumeSpecName "kube-api-access-v4xms". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.758349 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume" (OuterVolumeSpecName: "config-volume") pod "2da55ba3-88e7-4cda-8ad9-b5945e39e991" (UID: "2da55ba3-88e7-4cda-8ad9-b5945e39e991"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.758869 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.758931 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.759018 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.759081 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.759628 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2da55ba3-88e7-4cda-8ad9-b5945e39e991" (UID: "2da55ba3-88e7-4cda-8ad9-b5945e39e991"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 07:58:46 crc kubenswrapper[4875]: W1007 07:58:46.819936 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61ff4df8_3587_4330_96c1_c01e05ebd803.slice/crio-2f7ee507dce12d80b8167bbbebf4f84c1222646a0659f382a8845217159c52a4 WatchSource:0}: Error finding container 2f7ee507dce12d80b8167bbbebf4f84c1222646a0659f382a8845217159c52a4: Status 404 returned error can't find the container with id 2f7ee507dce12d80b8167bbbebf4f84c1222646a0659f382a8845217159c52a4 Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.850416 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2da55ba3-88e7-4cda-8ad9-b5945e39e991-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.850457 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2da55ba3-88e7-4cda-8ad9-b5945e39e991-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:46 crc kubenswrapper[4875]: I1007 07:58:46.850470 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4xms\" (UniqueName: \"kubernetes.io/projected/2da55ba3-88e7-4cda-8ad9-b5945e39e991-kube-api-access-v4xms\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.166748 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" event={"ID":"2da55ba3-88e7-4cda-8ad9-b5945e39e991","Type":"ContainerDied","Data":"1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.167414 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c0815cfd8ddb9b5a4ab536fb4a22a2f12a89701ed356e137ee53ec2a2384a50" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.166896 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.173173 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerStarted","Data":"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.173229 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerStarted","Data":"2f7ee507dce12d80b8167bbbebf4f84c1222646a0659f382a8845217159c52a4"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.189314 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" event={"ID":"2fcc08ca-5985-4132-a071-6821ef40cc5f","Type":"ContainerStarted","Data":"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.189663 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.192580 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerID="6f795231dcfee118da6fe7afa9dcac0a77f8b35329ae8f30997dcac7c0a4af62" exitCode=0 Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.192696 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlrfc" event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerDied","Data":"6f795231dcfee118da6fe7afa9dcac0a77f8b35329ae8f30997dcac7c0a4af62"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.192723 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlrfc" event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerStarted","Data":"9ba5bd8617351748d0509eeaa880771801903dd9e72b49382c08b94087ee6222"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.206435 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52f6b062-901f-4124-9509-bd2e495de62d","Type":"ContainerStarted","Data":"a1dd2401209b70180522fc924463ccd0bfd0e5308b2eec1e36c95781344774cb"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.206494 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52f6b062-901f-4124-9509-bd2e495de62d","Type":"ContainerStarted","Data":"b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.209933 4875 generic.go:334] "Generic (PLEG): container finished" podID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerID="8ed34763eaee3e15150bc86cc25aa946c94c5b6925f03520431087025da8a2e0" exitCode=0 Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.209990 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerDied","Data":"8ed34763eaee3e15150bc86cc25aa946c94c5b6925f03520431087025da8a2e0"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.210080 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerStarted","Data":"83c21425843c6d97a1d0564beebb9b2273545ef3aa519957647f95a33b450cc0"} Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.248026 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.255695 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" podStartSLOduration=131.255670263 podStartE2EDuration="2m11.255670263s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:47.235921526 +0000 UTC m=+152.195692079" watchObservedRunningTime="2025-10-07 07:58:47.255670263 +0000 UTC m=+152.215440806" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.262978 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-7clsb" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.274571 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.274542311 podStartE2EDuration="3.274542311s" podCreationTimestamp="2025-10-07 07:58:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:47.251034441 +0000 UTC m=+152.210805004" watchObservedRunningTime="2025-10-07 07:58:47.274542311 +0000 UTC m=+152.234312854" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.317022 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.321350 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:47 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:47 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:47 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.321420 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:47 crc kubenswrapper[4875]: I1007 07:58:47.558206 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.230296 4875 generic.go:334] "Generic (PLEG): container finished" podID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerID="55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183" exitCode=0 Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.230601 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerDied","Data":"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183"} 
Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.239101 4875 generic.go:334] "Generic (PLEG): container finished" podID="52f6b062-901f-4124-9509-bd2e495de62d" containerID="a1dd2401209b70180522fc924463ccd0bfd0e5308b2eec1e36c95781344774cb" exitCode=0 Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.239518 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52f6b062-901f-4124-9509-bd2e495de62d","Type":"ContainerDied","Data":"a1dd2401209b70180522fc924463ccd0bfd0e5308b2eec1e36c95781344774cb"} Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.325225 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:48 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:48 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:48 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.325306 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.329771 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 07:58:48 crc kubenswrapper[4875]: E1007 07:58:48.330807 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2da55ba3-88e7-4cda-8ad9-b5945e39e991" containerName="collect-profiles" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.330897 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2da55ba3-88e7-4cda-8ad9-b5945e39e991" containerName="collect-profiles" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.331115 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2da55ba3-88e7-4cda-8ad9-b5945e39e991" containerName="collect-profiles" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.332285 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.334948 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.334952 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.335562 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.490981 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.491074 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.592077 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.592209 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.592317 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.631871 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:48 crc kubenswrapper[4875]: I1007 07:58:48.658443 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.160629 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.248976 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"90e6483d-355c-4721-b121-e4f4589bc881","Type":"ContainerStarted","Data":"5e4a5d8be65684ff4342febf73a990fa9307ec20eaa14861038cd1a833dcba7d"} Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.317907 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:49 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:49 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:49 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.318055 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.518184 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.608511 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access\") pod \"52f6b062-901f-4124-9509-bd2e495de62d\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.608768 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir\") pod \"52f6b062-901f-4124-9509-bd2e495de62d\" (UID: \"52f6b062-901f-4124-9509-bd2e495de62d\") " Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.608902 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "52f6b062-901f-4124-9509-bd2e495de62d" (UID: "52f6b062-901f-4124-9509-bd2e495de62d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.609336 4875 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52f6b062-901f-4124-9509-bd2e495de62d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.617492 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "52f6b062-901f-4124-9509-bd2e495de62d" (UID: "52f6b062-901f-4124-9509-bd2e495de62d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:58:49 crc kubenswrapper[4875]: I1007 07:58:49.719592 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52f6b062-901f-4124-9509-bd2e495de62d-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:50 crc kubenswrapper[4875]: I1007 07:58:50.275354 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52f6b062-901f-4124-9509-bd2e495de62d","Type":"ContainerDied","Data":"b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef"} Oct 07 07:58:50 crc kubenswrapper[4875]: I1007 07:58:50.275647 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b751ddd435ac971b2f513ec0b959add43f7d2e6a3de5d26207114ec9bd77baef" Oct 07 07:58:50 crc kubenswrapper[4875]: I1007 07:58:50.275592 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 07:58:50 crc kubenswrapper[4875]: I1007 07:58:50.320072 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:50 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:50 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:50 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:50 crc kubenswrapper[4875]: I1007 07:58:50.320134 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:51 crc kubenswrapper[4875]: I1007 07:58:51.298755 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"90e6483d-355c-4721-b121-e4f4589bc881","Type":"ContainerStarted","Data":"393a36be5dd338ed324354ba92d76bf1e42a11be947cce56671739f4aad7ee68"} Oct 07 07:58:51 crc kubenswrapper[4875]: I1007 07:58:51.319993 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:51 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:51 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:51 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:51 crc kubenswrapper[4875]: I1007 07:58:51.320041 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:51 crc kubenswrapper[4875]: I1007 07:58:51.321215 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.321184744 podStartE2EDuration="3.321184744s" podCreationTimestamp="2025-10-07 07:58:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:58:51.312635551 +0000 UTC m=+156.272406104" 
watchObservedRunningTime="2025-10-07 07:58:51.321184744 +0000 UTC m=+156.280955287" Oct 07 07:58:52 crc kubenswrapper[4875]: I1007 07:58:52.318078 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:52 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:52 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:52 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:52 crc kubenswrapper[4875]: I1007 07:58:52.318735 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:52 crc kubenswrapper[4875]: I1007 07:58:52.329566 4875 generic.go:334] "Generic (PLEG): container finished" podID="90e6483d-355c-4721-b121-e4f4589bc881" containerID="393a36be5dd338ed324354ba92d76bf1e42a11be947cce56671739f4aad7ee68" exitCode=0 Oct 07 07:58:52 crc kubenswrapper[4875]: I1007 07:58:52.330121 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"90e6483d-355c-4721-b121-e4f4589bc881","Type":"ContainerDied","Data":"393a36be5dd338ed324354ba92d76bf1e42a11be947cce56671739f4aad7ee68"} Oct 07 07:58:52 crc kubenswrapper[4875]: I1007 07:58:52.964027 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-q7gd8" Oct 07 07:58:53 crc kubenswrapper[4875]: I1007 07:58:53.319978 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:53 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:53 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:53 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:53 crc kubenswrapper[4875]: I1007 07:58:53.320047 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:54 crc kubenswrapper[4875]: I1007 07:58:54.316383 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:54 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:54 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:54 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:54 crc kubenswrapper[4875]: I1007 07:58:54.316711 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:55 crc kubenswrapper[4875]: I1007 07:58:55.316674 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe 
failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:55 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:55 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:55 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:55 crc kubenswrapper[4875]: I1007 07:58:55.317056 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.318082 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:56 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:56 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:56 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.318196 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.742728 4875 patch_prober.go:28] interesting pod/console-f9d7485db-wccw7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.742799 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wccw7" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.750474 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.750541 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.752252 4875 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fthd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 07 07:58:56 crc kubenswrapper[4875]: I1007 07:58:56.752356 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fthd" podUID="97431ef0-70e0-4c93-9ebc-ba3c9823685f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 07 07:58:57 crc kubenswrapper[4875]: 
I1007 07:58:57.317634 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:57 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:57 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:57 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:57 crc kubenswrapper[4875]: I1007 07:58:57.317698 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:58 crc kubenswrapper[4875]: I1007 07:58:58.316539 4875 patch_prober.go:28] interesting pod/router-default-5444994796-vpcnf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 07:58:58 crc kubenswrapper[4875]: [-]has-synced failed: reason withheld Oct 07 07:58:58 crc kubenswrapper[4875]: [+]process-running ok Oct 07 07:58:58 crc kubenswrapper[4875]: healthz check failed Oct 07 07:58:58 crc kubenswrapper[4875]: I1007 07:58:58.317147 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vpcnf" podUID="9e48afb0-e9d1-4d51-a992-2de00ba12ec6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 07:58:58 crc kubenswrapper[4875]: I1007 07:58:58.789821 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:58 crc kubenswrapper[4875]: I1007 07:58:58.795581 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dce21abc-1295-4d45-bd26-07b7e37d674c-metrics-certs\") pod \"network-metrics-daemon-wk8rw\" (UID: \"dce21abc-1295-4d45-bd26-07b7e37d674c\") " pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:58 crc kubenswrapper[4875]: I1007 07:58:58.814914 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wk8rw" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.071763 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.096115 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir\") pod \"90e6483d-355c-4721-b121-e4f4589bc881\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.096217 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "90e6483d-355c-4721-b121-e4f4589bc881" (UID: "90e6483d-355c-4721-b121-e4f4589bc881"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.096272 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access\") pod \"90e6483d-355c-4721-b121-e4f4589bc881\" (UID: \"90e6483d-355c-4721-b121-e4f4589bc881\") " Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.096593 4875 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e6483d-355c-4721-b121-e4f4589bc881-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.101329 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "90e6483d-355c-4721-b121-e4f4589bc881" (UID: "90e6483d-355c-4721-b121-e4f4589bc881"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.197914 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e6483d-355c-4721-b121-e4f4589bc881-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.317870 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.321665 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-vpcnf" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.406180 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"90e6483d-355c-4721-b121-e4f4589bc881","Type":"ContainerDied","Data":"5e4a5d8be65684ff4342febf73a990fa9307ec20eaa14861038cd1a833dcba7d"} Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.406240 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4a5d8be65684ff4342febf73a990fa9307ec20eaa14861038cd1a833dcba7d" Oct 07 07:58:59 crc kubenswrapper[4875]: I1007 07:58:59.406319 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 07:59:01 crc kubenswrapper[4875]: I1007 07:59:01.220587 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 07:59:01 crc kubenswrapper[4875]: I1007 07:59:01.220645 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 07:59:05 crc kubenswrapper[4875]: I1007 07:59:05.290265 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 07:59:06 crc kubenswrapper[4875]: I1007 07:59:06.747212 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:59:06 crc kubenswrapper[4875]: I1007 07:59:06.750872 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 07:59:06 crc kubenswrapper[4875]: I1007 07:59:06.774050 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4fthd" Oct 07 07:59:16 crc kubenswrapper[4875]: I1007 07:59:16.971945 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-57m8s" Oct 07 07:59:23 crc kubenswrapper[4875]: I1007 07:59:23.826477 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 07:59:31 crc kubenswrapper[4875]: I1007 07:59:31.220621 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 07:59:31 crc kubenswrapper[4875]: I1007 07:59:31.221241 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 07:59:41 crc kubenswrapper[4875]: E1007 07:59:41.048047 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 07:59:41 crc kubenswrapper[4875]: E1007 07:59:41.049304 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2dgx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wjppx_openshift-marketplace(59aafada-cdff-4904-a432-0b9e632a8e8f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:41 crc kubenswrapper[4875]: E1007 07:59:41.050552 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wjppx" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" Oct 07 07:59:44 crc kubenswrapper[4875]: E1007 07:59:44.051772 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 07:59:44 crc kubenswrapper[4875]: E1007 07:59:44.052741 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nsp8f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-htjvv_openshift-marketplace(62c991b5-7e34-484b-8346-2db6f586c972): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:44 crc kubenswrapper[4875]: E1007 07:59:44.054177 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-htjvv" podUID="62c991b5-7e34-484b-8346-2db6f586c972" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.013463 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-htjvv" podUID="62c991b5-7e34-484b-8346-2db6f586c972" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.013482 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wjppx" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.164355 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.164624 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lz58f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-p58wr_openshift-marketplace(61ff4df8-3587-4330-96c1-c01e05ebd803): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.165859 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-p58wr" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.807580 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-p58wr" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.938504 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.938723 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-956mx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-6kwkt_openshift-marketplace(f4b26103-9634-4ff7-b3bd-d3819c51643f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:49 crc kubenswrapper[4875]: E1007 07:59:49.939928 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-6kwkt" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.104303 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.104474 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c2xfw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mlrfc_openshift-marketplace(e2939977-2c4d-49a5-9aaf-a9ff78f7925f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.105734 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mlrfc" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.417475 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.417712 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vt9n7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4z9kn_openshift-marketplace(36b27857-30e7-48c7-b0c9-5644dd4f631a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:50 crc kubenswrapper[4875]: E1007 07:59:50.418911 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4z9kn" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.460318 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mlrfc" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.460518 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4z9kn" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.460618 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-6kwkt" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" Oct 07 07:59:51 crc kubenswrapper[4875]: I1007 07:59:51.715669 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-wk8rw"] Oct 07 07:59:51 crc kubenswrapper[4875]: W1007 07:59:51.723596 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddce21abc_1295_4d45_bd26_07b7e37d674c.slice/crio-fbd315cf5ea0320101beabde545943cd6e3628a84cbdbcd3c7d35c9fb76b6c52 WatchSource:0}: Error finding container fbd315cf5ea0320101beabde545943cd6e3628a84cbdbcd3c7d35c9fb76b6c52: Status 404 returned error can't find the container with id fbd315cf5ea0320101beabde545943cd6e3628a84cbdbcd3c7d35c9fb76b6c52 Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.737104 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.737297 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sdh6h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bqwr8_openshift-marketplace(09dd69c8-1a2a-40d8-8087-a8e895ba97ee): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.738634 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bqwr8" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.923654 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.924393 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n4tm8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-x6gdw_openshift-marketplace(18dc718c-4aa1-460c-b3bf-62df2a1d277b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 07:59:51 crc kubenswrapper[4875]: E1007 07:59:51.925929 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-x6gdw" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" Oct 07 07:59:52 crc kubenswrapper[4875]: I1007 07:59:52.695640 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" event={"ID":"dce21abc-1295-4d45-bd26-07b7e37d674c","Type":"ContainerStarted","Data":"2070310f8834e182929e20a91ec561a77a1efce0fbe899213c22489927b26ac6"} Oct 07 07:59:52 crc kubenswrapper[4875]: I1007 07:59:52.696580 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" event={"ID":"dce21abc-1295-4d45-bd26-07b7e37d674c","Type":"ContainerStarted","Data":"9362ec26214368fad8f244902834b2a8919aabfa458967209c5158af050b6a5d"} Oct 07 07:59:52 crc kubenswrapper[4875]: I1007 07:59:52.696624 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wk8rw" event={"ID":"dce21abc-1295-4d45-bd26-07b7e37d674c","Type":"ContainerStarted","Data":"fbd315cf5ea0320101beabde545943cd6e3628a84cbdbcd3c7d35c9fb76b6c52"} Oct 07 07:59:52 crc kubenswrapper[4875]: E1007 07:59:52.700014 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bqwr8" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" 
Oct 07 07:59:52 crc kubenswrapper[4875]: E1007 07:59:52.702064 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-x6gdw" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" Oct 07 07:59:52 crc kubenswrapper[4875]: I1007 07:59:52.716097 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-wk8rw" podStartSLOduration=196.716054349 podStartE2EDuration="3m16.716054349s" podCreationTimestamp="2025-10-07 07:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 07:59:52.714616625 +0000 UTC m=+217.674387168" watchObservedRunningTime="2025-10-07 07:59:52.716054349 +0000 UTC m=+217.675824932" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.153509 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn"] Oct 07 08:00:00 crc kubenswrapper[4875]: E1007 08:00:00.154921 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52f6b062-901f-4124-9509-bd2e495de62d" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.154948 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="52f6b062-901f-4124-9509-bd2e495de62d" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: E1007 08:00:00.154990 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90e6483d-355c-4721-b121-e4f4589bc881" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.155003 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="90e6483d-355c-4721-b121-e4f4589bc881" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.155187 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="52f6b062-901f-4124-9509-bd2e495de62d" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.155204 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="90e6483d-355c-4721-b121-e4f4589bc881" containerName="pruner" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.156402 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.159017 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.159082 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.159105 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tlc4\" (UniqueName: \"kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.166611 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.166656 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.171677 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn"] Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.259853 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.260317 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.260424 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tlc4\" (UniqueName: \"kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.261669 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume\") pod 
\"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.267019 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.279274 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tlc4\" (UniqueName: \"kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4\") pod \"collect-profiles-29330400-j2pbn\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.491033 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.746848 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn"] Oct 07 08:00:00 crc kubenswrapper[4875]: W1007 08:00:00.752765 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40d3c35c_8c62_46e5_8bb7_e013ac3a2a95.slice/crio-eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b WatchSource:0}: Error finding container eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b: Status 404 returned error can't find the container with id eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b Oct 07 08:00:00 crc kubenswrapper[4875]: I1007 08:00:00.765078 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" event={"ID":"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95","Type":"ContainerStarted","Data":"eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b"} Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.220720 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.220803 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.220905 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.221812 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a"} 
pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.221951 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a" gracePeriod=600 Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.775971 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a" exitCode=0 Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.776335 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a"} Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.776380 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f"} Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.784768 4875 generic.go:334] "Generic (PLEG): container finished" podID="40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" containerID="0f0705d63b621eb3929ff72e7946c29b9c2a70d4d688999fd1752a0b08d86ab9" exitCode=0 Oct 07 08:00:01 crc kubenswrapper[4875]: I1007 08:00:01.784836 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" event={"ID":"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95","Type":"ContainerDied","Data":"0f0705d63b621eb3929ff72e7946c29b9c2a70d4d688999fd1752a0b08d86ab9"} Oct 07 08:00:02 crc kubenswrapper[4875]: I1007 08:00:02.791092 4875 generic.go:334] "Generic (PLEG): container finished" podID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerID="169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739" exitCode=0 Oct 07 08:00:02 crc kubenswrapper[4875]: I1007 08:00:02.791177 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerDied","Data":"169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739"} Oct 07 08:00:02 crc kubenswrapper[4875]: I1007 08:00:02.998693 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.201820 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tlc4\" (UniqueName: \"kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4\") pod \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.203181 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume\") pod \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.203361 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume\") pod \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\" (UID: \"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95\") " Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.204512 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume" (OuterVolumeSpecName: "config-volume") pod "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" (UID: "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.209440 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" (UID: "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.211165 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4" (OuterVolumeSpecName: "kube-api-access-9tlc4") pod "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" (UID: "40d3c35c-8c62-46e5-8bb7-e013ac3a2a95"). InnerVolumeSpecName "kube-api-access-9tlc4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.308481 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.308522 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tlc4\" (UniqueName: \"kubernetes.io/projected/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-kube-api-access-9tlc4\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.308536 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.800262 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" event={"ID":"40d3c35c-8c62-46e5-8bb7-e013ac3a2a95","Type":"ContainerDied","Data":"eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b"} Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.800729 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb93b6b8b03ce9131b59e017c6408cfcae9aec406acd4b157c3adb9181228f8b" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.800305 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn" Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.802969 4875 generic.go:334] "Generic (PLEG): container finished" podID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerID="7b6a9831f2e00c9866ae5b01e336109d19d5fb499c1c769579adf1bd1a12ea7a" exitCode=0 Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.803056 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerDied","Data":"7b6a9831f2e00c9866ae5b01e336109d19d5fb499c1c769579adf1bd1a12ea7a"} Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.809522 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerStarted","Data":"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4"} Oct 07 08:00:03 crc kubenswrapper[4875]: I1007 08:00:03.853543 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p58wr" podStartSLOduration=3.675463422 podStartE2EDuration="1m18.853519771s" podCreationTimestamp="2025-10-07 07:58:45 +0000 UTC" firstStartedPulling="2025-10-07 07:58:48.258850648 +0000 UTC m=+153.218621191" lastFinishedPulling="2025-10-07 08:00:03.436906997 +0000 UTC m=+228.396677540" observedRunningTime="2025-10-07 08:00:03.848486887 +0000 UTC m=+228.808257450" watchObservedRunningTime="2025-10-07 08:00:03.853519771 +0000 UTC m=+228.813290314" Oct 07 08:00:04 crc kubenswrapper[4875]: I1007 08:00:04.820172 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerID="fc7761140bcb281339461b717b9bc5fafff95d3c3ff28a1ced4152b2c6253a12" exitCode=0 Oct 07 08:00:04 crc kubenswrapper[4875]: I1007 08:00:04.820281 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-mlrfc" event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerDied","Data":"fc7761140bcb281339461b717b9bc5fafff95d3c3ff28a1ced4152b2c6253a12"} Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.828626 4875 generic.go:334] "Generic (PLEG): container finished" podID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerID="86f23fbd1a933f0638a0616ab3c8a72fea0722d61cd101b7f1dcbe7f4d2be039" exitCode=0 Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.828690 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerDied","Data":"86f23fbd1a933f0638a0616ab3c8a72fea0722d61cd101b7f1dcbe7f4d2be039"} Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.835222 4875 generic.go:334] "Generic (PLEG): container finished" podID="62c991b5-7e34-484b-8346-2db6f586c972" containerID="430ee1689a2d2a4bcd8f6dfed404dd5bf5066cae9fa53ddfa847d17bc3f70684" exitCode=0 Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.835289 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerDied","Data":"430ee1689a2d2a4bcd8f6dfed404dd5bf5066cae9fa53ddfa847d17bc3f70684"} Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.848842 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerStarted","Data":"5510f2981d6275d42a7b5395b1497bb8f0216f7e1e606e9fd1f802a052b4dc88"} Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.856610 4875 generic.go:334] "Generic (PLEG): container finished" podID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerID="2822bf20294a3a6db926a5611b8433145364c6c1ca41adb2ce55d607799c100a" exitCode=0 Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.856694 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerDied","Data":"2822bf20294a3a6db926a5611b8433145364c6c1ca41adb2ce55d607799c100a"} Oct 07 08:00:05 crc kubenswrapper[4875]: I1007 08:00:05.901058 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4z9kn" podStartSLOduration=4.309244695 podStartE2EDuration="1m21.900994234s" podCreationTimestamp="2025-10-07 07:58:44 +0000 UTC" firstStartedPulling="2025-10-07 07:58:47.213642313 +0000 UTC m=+152.173412856" lastFinishedPulling="2025-10-07 08:00:04.805391852 +0000 UTC m=+229.765162395" observedRunningTime="2025-10-07 08:00:05.875488731 +0000 UTC m=+230.835259274" watchObservedRunningTime="2025-10-07 08:00:05.900994234 +0000 UTC m=+230.860764777" Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.337240 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.337404 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.869488 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlrfc" 
event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerStarted","Data":"7ce38aa67a3ff40bb714b93a87b904a528ddd7d8fd21335bd27100616ffdcc02"} Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.876295 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerStarted","Data":"246c2eaf5329404e537edac66be50592c0e3ac1b6397f5bfe78a2540b7e7c8d5"} Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.880048 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerStarted","Data":"b89b92e77d90201a214aba6bcd0c627362ae486a88c6f4bf15d7f607fe9dc4ea"} Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.896509 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mlrfc" podStartSLOduration=3.441554966 podStartE2EDuration="1m21.896486814s" podCreationTimestamp="2025-10-07 07:58:45 +0000 UTC" firstStartedPulling="2025-10-07 07:58:47.199120597 +0000 UTC m=+152.158891140" lastFinishedPulling="2025-10-07 08:00:05.654052445 +0000 UTC m=+230.613822988" observedRunningTime="2025-10-07 08:00:06.89474803 +0000 UTC m=+231.854518573" watchObservedRunningTime="2025-10-07 08:00:06.896486814 +0000 UTC m=+231.856257357" Oct 07 08:00:06 crc kubenswrapper[4875]: I1007 08:00:06.915751 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bqwr8" podStartSLOduration=4.557147907 podStartE2EDuration="1m24.915732974s" podCreationTimestamp="2025-10-07 07:58:42 +0000 UTC" firstStartedPulling="2025-10-07 07:58:46.174463003 +0000 UTC m=+151.134233546" lastFinishedPulling="2025-10-07 08:00:06.53304806 +0000 UTC m=+231.492818613" observedRunningTime="2025-10-07 08:00:06.913920898 +0000 UTC m=+231.873691461" watchObservedRunningTime="2025-10-07 08:00:06.915732974 +0000 UTC m=+231.875503517" Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.527640 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p58wr" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="registry-server" probeResult="failure" output=< Oct 07 08:00:07 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 08:00:07 crc kubenswrapper[4875]: > Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.894749 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerID="0962e76645e67f318ae108ec1c8e370c0f034884ac49caa993f6b5fe0693e74e" exitCode=0 Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.895297 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerDied","Data":"0962e76645e67f318ae108ec1c8e370c0f034884ac49caa993f6b5fe0693e74e"} Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.907608 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerStarted","Data":"8650fdf4631ed29b80efb002c7b0f7b13e439b1d4a90d9484edee4010a06fe52"} Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.913794 4875 generic.go:334] "Generic (PLEG): container finished" podID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" 
containerID="2acb694c74e04083fdf5457912d91b3adffefb812d20230e79e563c36ff0d580" exitCode=0 Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.913887 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerDied","Data":"2acb694c74e04083fdf5457912d91b3adffefb812d20230e79e563c36ff0d580"} Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.917831 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-htjvv" podStartSLOduration=4.10203441 podStartE2EDuration="1m25.917820066s" podCreationTimestamp="2025-10-07 07:58:42 +0000 UTC" firstStartedPulling="2025-10-07 07:58:44.799368434 +0000 UTC m=+149.759138977" lastFinishedPulling="2025-10-07 08:00:06.61515409 +0000 UTC m=+231.574924633" observedRunningTime="2025-10-07 08:00:06.93711427 +0000 UTC m=+231.896884853" watchObservedRunningTime="2025-10-07 08:00:07.917820066 +0000 UTC m=+232.877590609" Oct 07 08:00:07 crc kubenswrapper[4875]: I1007 08:00:07.938066 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wjppx" podStartSLOduration=4.056306636 podStartE2EDuration="1m25.938038835s" podCreationTimestamp="2025-10-07 07:58:42 +0000 UTC" firstStartedPulling="2025-10-07 07:58:44.83799724 +0000 UTC m=+149.797767783" lastFinishedPulling="2025-10-07 08:00:06.719729439 +0000 UTC m=+231.679499982" observedRunningTime="2025-10-07 08:00:07.93719232 +0000 UTC m=+232.896962883" watchObservedRunningTime="2025-10-07 08:00:07.938038835 +0000 UTC m=+232.897809378" Oct 07 08:00:08 crc kubenswrapper[4875]: I1007 08:00:08.925775 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerStarted","Data":"2cacab925991935d1a41004e16c433d5cd7d4bf75821219fd92f29b99b064c24"} Oct 07 08:00:08 crc kubenswrapper[4875]: I1007 08:00:08.929245 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerStarted","Data":"68465505e2f392b57ac9e3cfc114ef19e731731a023cfe2af7a4128fbb288ca1"} Oct 07 08:00:08 crc kubenswrapper[4875]: I1007 08:00:08.952769 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6kwkt" podStartSLOduration=2.450930587 podStartE2EDuration="1m24.952741685s" podCreationTimestamp="2025-10-07 07:58:44 +0000 UTC" firstStartedPulling="2025-10-07 07:58:45.930560768 +0000 UTC m=+150.890331311" lastFinishedPulling="2025-10-07 08:00:08.432371866 +0000 UTC m=+233.392142409" observedRunningTime="2025-10-07 08:00:08.949623679 +0000 UTC m=+233.909394232" watchObservedRunningTime="2025-10-07 08:00:08.952741685 +0000 UTC m=+233.912512258" Oct 07 08:00:08 crc kubenswrapper[4875]: I1007 08:00:08.971921 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x6gdw" podStartSLOduration=4.182498728 podStartE2EDuration="1m26.971901762s" podCreationTimestamp="2025-10-07 07:58:42 +0000 UTC" firstStartedPulling="2025-10-07 07:58:45.891244291 +0000 UTC m=+150.851014834" lastFinishedPulling="2025-10-07 08:00:08.680647325 +0000 UTC m=+233.640417868" observedRunningTime="2025-10-07 08:00:08.970034305 +0000 UTC m=+233.929804848" watchObservedRunningTime="2025-10-07 
08:00:08.971901762 +0000 UTC m=+233.931672315" Oct 07 08:00:12 crc kubenswrapper[4875]: I1007 08:00:12.914305 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:00:12 crc kubenswrapper[4875]: I1007 08:00:12.915454 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:00:12 crc kubenswrapper[4875]: I1007 08:00:12.984398 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.025178 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.025264 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.037779 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.069540 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.106669 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.106720 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.152159 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.318237 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.318289 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.358566 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:13 crc kubenswrapper[4875]: I1007 08:00:13.997758 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:14 crc kubenswrapper[4875]: I1007 08:00:14.008460 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:00:14 crc kubenswrapper[4875]: I1007 08:00:14.018245 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:14 crc kubenswrapper[4875]: I1007 08:00:14.886211 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:00:14 crc kubenswrapper[4875]: I1007 08:00:14.886644 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:00:14 crc kubenswrapper[4875]: I1007 08:00:14.930437 4875 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.007749 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.311492 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.311584 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.362939 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.900177 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.900686 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.941404 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.950808 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:00:15 crc kubenswrapper[4875]: I1007 08:00:15.976539 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x6gdw" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="registry-server" containerID="cri-o://68465505e2f392b57ac9e3cfc114ef19e731731a023cfe2af7a4128fbb288ca1" gracePeriod=2 Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.016648 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.029298 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.140746 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.141077 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wjppx" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="registry-server" containerID="cri-o://8650fdf4631ed29b80efb002c7b0f7b13e439b1d4a90d9484edee4010a06fe52" gracePeriod=2 Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.379809 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.424431 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.904505 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.989003 4875 generic.go:334] "Generic (PLEG): 
container finished" podID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerID="68465505e2f392b57ac9e3cfc114ef19e731731a023cfe2af7a4128fbb288ca1" exitCode=0 Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.989108 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerDied","Data":"68465505e2f392b57ac9e3cfc114ef19e731731a023cfe2af7a4128fbb288ca1"} Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.992268 4875 generic.go:334] "Generic (PLEG): container finished" podID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerID="8650fdf4631ed29b80efb002c7b0f7b13e439b1d4a90d9484edee4010a06fe52" exitCode=0 Oct 07 08:00:16 crc kubenswrapper[4875]: I1007 08:00:16.993179 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerDied","Data":"8650fdf4631ed29b80efb002c7b0f7b13e439b1d4a90d9484edee4010a06fe52"} Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.325930 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.447907 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content\") pod \"59aafada-cdff-4904-a432-0b9e632a8e8f\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.448083 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2dgx\" (UniqueName: \"kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx\") pod \"59aafada-cdff-4904-a432-0b9e632a8e8f\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.448152 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities\") pod \"59aafada-cdff-4904-a432-0b9e632a8e8f\" (UID: \"59aafada-cdff-4904-a432-0b9e632a8e8f\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.449240 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities" (OuterVolumeSpecName: "utilities") pod "59aafada-cdff-4904-a432-0b9e632a8e8f" (UID: "59aafada-cdff-4904-a432-0b9e632a8e8f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.455864 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx" (OuterVolumeSpecName: "kube-api-access-j2dgx") pod "59aafada-cdff-4904-a432-0b9e632a8e8f" (UID: "59aafada-cdff-4904-a432-0b9e632a8e8f"). InnerVolumeSpecName "kube-api-access-j2dgx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.550824 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2dgx\" (UniqueName: \"kubernetes.io/projected/59aafada-cdff-4904-a432-0b9e632a8e8f-kube-api-access-j2dgx\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.550867 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.665273 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.693364 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "59aafada-cdff-4904-a432-0b9e632a8e8f" (UID: "59aafada-cdff-4904-a432-0b9e632a8e8f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.752536 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities\") pod \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.752604 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4tm8\" (UniqueName: \"kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8\") pod \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.752663 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content\") pod \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\" (UID: \"18dc718c-4aa1-460c-b3bf-62df2a1d277b\") " Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.753043 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59aafada-cdff-4904-a432-0b9e632a8e8f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.756622 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities" (OuterVolumeSpecName: "utilities") pod "18dc718c-4aa1-460c-b3bf-62df2a1d277b" (UID: "18dc718c-4aa1-460c-b3bf-62df2a1d277b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.771454 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8" (OuterVolumeSpecName: "kube-api-access-n4tm8") pod "18dc718c-4aa1-460c-b3bf-62df2a1d277b" (UID: "18dc718c-4aa1-460c-b3bf-62df2a1d277b"). InnerVolumeSpecName "kube-api-access-n4tm8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.812023 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18dc718c-4aa1-460c-b3bf-62df2a1d277b" (UID: "18dc718c-4aa1-460c-b3bf-62df2a1d277b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.854587 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.854641 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4tm8\" (UniqueName: \"kubernetes.io/projected/18dc718c-4aa1-460c-b3bf-62df2a1d277b-kube-api-access-n4tm8\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:17 crc kubenswrapper[4875]: I1007 08:00:17.854674 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18dc718c-4aa1-460c-b3bf-62df2a1d277b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.001918 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6gdw" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.002048 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6gdw" event={"ID":"18dc718c-4aa1-460c-b3bf-62df2a1d277b","Type":"ContainerDied","Data":"b172f4eaf74ad9bd9047ce5ca1b66e28cf3b88622d4d4d447dcd77770ccd340e"} Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.002629 4875 scope.go:117] "RemoveContainer" containerID="68465505e2f392b57ac9e3cfc114ef19e731731a023cfe2af7a4128fbb288ca1" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.006029 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjppx" event={"ID":"59aafada-cdff-4904-a432-0b9e632a8e8f","Type":"ContainerDied","Data":"32d5fb57de668f70403e0705524ecf92bbd33474b40e2e78c3c483e24a24a411"} Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.006084 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wjppx" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.027753 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.030621 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wjppx"] Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.036465 4875 scope.go:117] "RemoveContainer" containerID="2acb694c74e04083fdf5457912d91b3adffefb812d20230e79e563c36ff0d580" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.047803 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.051733 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x6gdw"] Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.060401 4875 scope.go:117] "RemoveContainer" containerID="11094cfd39db106746b212f9221c9fc0dc77231f532660c53aaa1dfc775616ce" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.083441 4875 scope.go:117] "RemoveContainer" containerID="8650fdf4631ed29b80efb002c7b0f7b13e439b1d4a90d9484edee4010a06fe52" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.098692 4875 scope.go:117] "RemoveContainer" containerID="2822bf20294a3a6db926a5611b8433145364c6c1ca41adb2ce55d607799c100a" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.119403 4875 scope.go:117] "RemoveContainer" containerID="fa04d0b1c8e9234ef09b58e873de1de62354bd1aec0103150e54c3f35f2feb23" Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.339481 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 08:00:18 crc kubenswrapper[4875]: I1007 08:00:18.341106 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4z9kn" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="registry-server" containerID="cri-o://5510f2981d6275d42a7b5395b1497bb8f0216f7e1e606e9fd1f802a052b4dc88" gracePeriod=2 Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.016203 4875 generic.go:334] "Generic (PLEG): container finished" podID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerID="5510f2981d6275d42a7b5395b1497bb8f0216f7e1e606e9fd1f802a052b4dc88" exitCode=0 Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.016277 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerDied","Data":"5510f2981d6275d42a7b5395b1497bb8f0216f7e1e606e9fd1f802a052b4dc88"} Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.616723 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.681915 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content\") pod \"36b27857-30e7-48c7-b0c9-5644dd4f631a\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.683199 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt9n7\" (UniqueName: \"kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7\") pod \"36b27857-30e7-48c7-b0c9-5644dd4f631a\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.683369 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities\") pod \"36b27857-30e7-48c7-b0c9-5644dd4f631a\" (UID: \"36b27857-30e7-48c7-b0c9-5644dd4f631a\") " Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.684767 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities" (OuterVolumeSpecName: "utilities") pod "36b27857-30e7-48c7-b0c9-5644dd4f631a" (UID: "36b27857-30e7-48c7-b0c9-5644dd4f631a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.690490 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7" (OuterVolumeSpecName: "kube-api-access-vt9n7") pod "36b27857-30e7-48c7-b0c9-5644dd4f631a" (UID: "36b27857-30e7-48c7-b0c9-5644dd4f631a"). InnerVolumeSpecName "kube-api-access-vt9n7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.703661 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36b27857-30e7-48c7-b0c9-5644dd4f631a" (UID: "36b27857-30e7-48c7-b0c9-5644dd4f631a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.706581 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" path="/var/lib/kubelet/pods/18dc718c-4aa1-460c-b3bf-62df2a1d277b/volumes" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.712238 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" path="/var/lib/kubelet/pods/59aafada-cdff-4904-a432-0b9e632a8e8f/volumes" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.785047 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.785108 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt9n7\" (UniqueName: \"kubernetes.io/projected/36b27857-30e7-48c7-b0c9-5644dd4f631a-kube-api-access-vt9n7\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:19 crc kubenswrapper[4875]: I1007 08:00:19.785131 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b27857-30e7-48c7-b0c9-5644dd4f631a-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.028014 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4z9kn" event={"ID":"36b27857-30e7-48c7-b0c9-5644dd4f631a","Type":"ContainerDied","Data":"83c21425843c6d97a1d0564beebb9b2273545ef3aa519957647f95a33b450cc0"} Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.028077 4875 scope.go:117] "RemoveContainer" containerID="5510f2981d6275d42a7b5395b1497bb8f0216f7e1e606e9fd1f802a052b4dc88" Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.028128 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4z9kn" Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.049906 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.065239 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4z9kn"] Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.065628 4875 scope.go:117] "RemoveContainer" containerID="7b6a9831f2e00c9866ae5b01e336109d19d5fb499c1c769579adf1bd1a12ea7a" Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.082467 4875 scope.go:117] "RemoveContainer" containerID="8ed34763eaee3e15150bc86cc25aa946c94c5b6925f03520431087025da8a2e0" Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.537712 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.538042 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p58wr" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="registry-server" containerID="cri-o://12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4" gracePeriod=2 Oct 07 08:00:20 crc kubenswrapper[4875]: I1007 08:00:20.888366 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.001335 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content\") pod \"61ff4df8-3587-4330-96c1-c01e05ebd803\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.001409 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz58f\" (UniqueName: \"kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f\") pod \"61ff4df8-3587-4330-96c1-c01e05ebd803\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.001456 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities\") pod \"61ff4df8-3587-4330-96c1-c01e05ebd803\" (UID: \"61ff4df8-3587-4330-96c1-c01e05ebd803\") " Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.002902 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities" (OuterVolumeSpecName: "utilities") pod "61ff4df8-3587-4330-96c1-c01e05ebd803" (UID: "61ff4df8-3587-4330-96c1-c01e05ebd803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.010844 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f" (OuterVolumeSpecName: "kube-api-access-lz58f") pod "61ff4df8-3587-4330-96c1-c01e05ebd803" (UID: "61ff4df8-3587-4330-96c1-c01e05ebd803"). InnerVolumeSpecName "kube-api-access-lz58f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.039535 4875 generic.go:334] "Generic (PLEG): container finished" podID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerID="12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4" exitCode=0 Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.039607 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p58wr" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.039609 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerDied","Data":"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4"} Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.039673 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p58wr" event={"ID":"61ff4df8-3587-4330-96c1-c01e05ebd803","Type":"ContainerDied","Data":"2f7ee507dce12d80b8167bbbebf4f84c1222646a0659f382a8845217159c52a4"} Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.039722 4875 scope.go:117] "RemoveContainer" containerID="12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.057723 4875 scope.go:117] "RemoveContainer" containerID="169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.072271 4875 scope.go:117] "RemoveContainer" containerID="55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.102636 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz58f\" (UniqueName: \"kubernetes.io/projected/61ff4df8-3587-4330-96c1-c01e05ebd803-kube-api-access-lz58f\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.102665 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.103948 4875 scope.go:117] "RemoveContainer" containerID="12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4" Oct 07 08:00:21 crc kubenswrapper[4875]: E1007 08:00:21.106127 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4\": container with ID starting with 12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4 not found: ID does not exist" containerID="12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.106178 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4"} err="failed to get container status \"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4\": rpc error: code = NotFound desc = could not find container \"12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4\": container with ID starting with 12a66fb9cc9948862ed7022bc38e317c7340a28bd7626c30883a98df44cf6ef4 not found: ID does not exist" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.106209 4875 scope.go:117] "RemoveContainer" containerID="169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739" Oct 07 08:00:21 crc kubenswrapper[4875]: E1007 08:00:21.107036 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739\": container with ID starting with 
169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739 not found: ID does not exist" containerID="169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.107099 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739"} err="failed to get container status \"169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739\": rpc error: code = NotFound desc = could not find container \"169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739\": container with ID starting with 169b7612dd0a489e6bc65631b6b663e83461bc7954a0f32b6b061df7895b3739 not found: ID does not exist" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.107143 4875 scope.go:117] "RemoveContainer" containerID="55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183" Oct 07 08:00:21 crc kubenswrapper[4875]: E1007 08:00:21.107595 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183\": container with ID starting with 55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183 not found: ID does not exist" containerID="55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.107633 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183"} err="failed to get container status \"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183\": rpc error: code = NotFound desc = could not find container \"55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183\": container with ID starting with 55050330219edf9d3128afed9b81b93d71108286ee205605694df3d26575b183 not found: ID does not exist" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.119008 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61ff4df8-3587-4330-96c1-c01e05ebd803" (UID: "61ff4df8-3587-4330-96c1-c01e05ebd803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.203833 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ff4df8-3587-4330-96c1-c01e05ebd803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.370508 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.373015 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p58wr"] Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.706321 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" path="/var/lib/kubelet/pods/36b27857-30e7-48c7-b0c9-5644dd4f631a/volumes" Oct 07 08:00:21 crc kubenswrapper[4875]: I1007 08:00:21.707474 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" path="/var/lib/kubelet/pods/61ff4df8-3587-4330-96c1-c01e05ebd803/volumes" Oct 07 08:00:41 crc kubenswrapper[4875]: I1007 08:00:41.951998 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" containerID="cri-o://495ab7dbc39dfacf2e885b92ae0f1104e2fad2c188a1261e5f4e700504f53d8a" gracePeriod=15 Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.168083 4875 generic.go:334] "Generic (PLEG): container finished" podID="88313132-e652-4c64-b607-5b806c93e153" containerID="495ab7dbc39dfacf2e885b92ae0f1104e2fad2c188a1261e5f4e700504f53d8a" exitCode=0 Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.168124 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" event={"ID":"88313132-e652-4c64-b607-5b806c93e153","Type":"ContainerDied","Data":"495ab7dbc39dfacf2e885b92ae0f1104e2fad2c188a1261e5f4e700504f53d8a"} Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.341422 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377018 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-66c8cdd865-dd5kg"] Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377291 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377305 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377315 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377321 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377334 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377340 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377350 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377355 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377363 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377369 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377379 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377384 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377397 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377403 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377411 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377417 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377425 4875 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377430 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377438 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377443 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377452 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377458 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="extract-utilities" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377465 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377471 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="extract-content" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377478 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377484 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: E1007 08:00:42.377493 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" containerName="collect-profiles" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377498 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" containerName="collect-profiles" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377604 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="18dc718c-4aa1-460c-b3bf-62df2a1d277b" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377616 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b27857-30e7-48c7-b0c9-5644dd4f631a" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377625 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" containerName="collect-profiles" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377638 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="59aafada-cdff-4904-a432-0b9e632a8e8f" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377646 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="61ff4df8-3587-4330-96c1-c01e05ebd803" containerName="registry-server" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.377652 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="88313132-e652-4c64-b607-5b806c93e153" containerName="oauth-openshift" Oct 07 08:00:42 crc 
kubenswrapper[4875]: I1007 08:00:42.379219 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.397733 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-66c8cdd865-dd5kg"] Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.402696 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.402764 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403007 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403036 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403066 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403091 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403121 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403153 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6qq9\" (UniqueName: \"kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403224 4875 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403249 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403272 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403324 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403350 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.403376 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies\") pod \"88313132-e652-4c64-b607-5b806c93e153\" (UID: \"88313132-e652-4c64-b607-5b806c93e153\") " Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.404311 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.404680 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.405305 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.405756 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.406074 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.420798 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.427838 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.428766 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9" (OuterVolumeSpecName: "kube-api-access-x6qq9") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "kube-api-access-x6qq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.435597 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.438910 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.439157 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.439292 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.439480 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.439742 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "88313132-e652-4c64-b607-5b806c93e153" (UID: "88313132-e652-4c64-b607-5b806c93e153"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.504849 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-error\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.504920 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-router-certs\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.504975 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-dir\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.504997 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5t4s\" (UniqueName: \"kubernetes.io/projected/d02f3b10-9127-41fa-ac79-09952b04bd74-kube-api-access-p5t4s\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505023 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505208 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-login\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505348 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505376 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505427 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-policies\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505735 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505811 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-session\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505847 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505870 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-service-ca\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.505941 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506237 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506293 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506306 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506316 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506327 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506337 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506348 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506361 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6qq9\" (UniqueName: \"kubernetes.io/projected/88313132-e652-4c64-b607-5b806c93e153-kube-api-access-x6qq9\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506372 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506385 4875 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88313132-e652-4c64-b607-5b806c93e153-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506398 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506410 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506429 4875 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.506439 4875 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/88313132-e652-4c64-b607-5b806c93e153-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.607840 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-session\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.607964 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.607989 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-service-ca\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.608813 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609003 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609053 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-error\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609108 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-router-certs\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609160 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-service-ca\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: 
\"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609223 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-dir\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609269 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5t4s\" (UniqueName: \"kubernetes.io/projected/d02f3b10-9127-41fa-ac79-09952b04bd74-kube-api-access-p5t4s\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609298 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609344 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-login\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609355 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-dir\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609378 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609423 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609448 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-policies\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 
08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609620 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.609670 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.610361 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d02f3b10-9127-41fa-ac79-09952b04bd74-audit-policies\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.613650 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.613658 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-router-certs\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.613680 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.613689 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-error\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.613778 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: 
I1007 08:00:42.613991 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-user-template-login\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.614026 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-session\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.616105 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d02f3b10-9127-41fa-ac79-09952b04bd74-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.627389 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5t4s\" (UniqueName: \"kubernetes.io/projected/d02f3b10-9127-41fa-ac79-09952b04bd74-kube-api-access-p5t4s\") pod \"oauth-openshift-66c8cdd865-dd5kg\" (UID: \"d02f3b10-9127-41fa-ac79-09952b04bd74\") " pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.696448 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:42 crc kubenswrapper[4875]: I1007 08:00:42.903128 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-66c8cdd865-dd5kg"] Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.175935 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" event={"ID":"d02f3b10-9127-41fa-ac79-09952b04bd74","Type":"ContainerStarted","Data":"1c1cfb5f34a3ff9ff12462c83ff87550c6fe32316829966e57c3f0ac12357cfe"} Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.178161 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" event={"ID":"88313132-e652-4c64-b607-5b806c93e153","Type":"ContainerDied","Data":"c024e8a40266b2286c733e08a9fa62bbec22ce7621d979a4bd68074081ed0352"} Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.178198 4875 scope.go:117] "RemoveContainer" containerID="495ab7dbc39dfacf2e885b92ae0f1104e2fad2c188a1261e5f4e700504f53d8a" Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.178254 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hlnjg" Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.218251 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.221250 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hlnjg"] Oct 07 08:00:43 crc kubenswrapper[4875]: I1007 08:00:43.705702 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88313132-e652-4c64-b607-5b806c93e153" path="/var/lib/kubelet/pods/88313132-e652-4c64-b607-5b806c93e153/volumes" Oct 07 08:00:44 crc kubenswrapper[4875]: I1007 08:00:44.183479 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" event={"ID":"d02f3b10-9127-41fa-ac79-09952b04bd74","Type":"ContainerStarted","Data":"95a1952b84f9ee2d59c820bc16397faed12588247c4199909a71d8cf56b56587"} Oct 07 08:00:44 crc kubenswrapper[4875]: I1007 08:00:44.183711 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:44 crc kubenswrapper[4875]: I1007 08:00:44.188853 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" Oct 07 08:00:44 crc kubenswrapper[4875]: I1007 08:00:44.205710 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-66c8cdd865-dd5kg" podStartSLOduration=28.205691234 podStartE2EDuration="28.205691234s" podCreationTimestamp="2025-10-07 08:00:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:00:44.20177074 +0000 UTC m=+269.161541293" watchObservedRunningTime="2025-10-07 08:00:44.205691234 +0000 UTC m=+269.165461777" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.008656 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.009518 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bqwr8" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="registry-server" containerID="cri-o://246c2eaf5329404e537edac66be50592c0e3ac1b6397f5bfe78a2540b7e7c8d5" gracePeriod=30 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.020081 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.020674 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-htjvv" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="registry-server" containerID="cri-o://b89b92e77d90201a214aba6bcd0c627362ae486a88c6f4bf15d7f607fe9dc4ea" gracePeriod=30 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.037609 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.038177 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" 
containerName="marketplace-operator" containerID="cri-o://c752782e23754b4b0e11f4e7c85183900c60495b30945ab2879cc0541f704f29" gracePeriod=30 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.051220 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kjd46"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.052384 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.055657 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.055977 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6kwkt" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="registry-server" containerID="cri-o://2cacab925991935d1a41004e16c433d5cd7d4bf75821219fd92f29b99b064c24" gracePeriod=30 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.071321 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.071626 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mlrfc" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="registry-server" containerID="cri-o://7ce38aa67a3ff40bb714b93a87b904a528ddd7d8fd21335bd27100616ffdcc02" gracePeriod=30 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.077594 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kjd46"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.219792 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.219859 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt74g\" (UniqueName: \"kubernetes.io/projected/193bb790-d155-4aee-9f31-41b457c429ca-kube-api-access-lt74g\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.219925 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.307591 4875 generic.go:334] "Generic (PLEG): container finished" podID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerID="246c2eaf5329404e537edac66be50592c0e3ac1b6397f5bfe78a2540b7e7c8d5" exitCode=0 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.308276 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerDied","Data":"246c2eaf5329404e537edac66be50592c0e3ac1b6397f5bfe78a2540b7e7c8d5"} Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.320356 4875 generic.go:334] "Generic (PLEG): container finished" podID="62c991b5-7e34-484b-8346-2db6f586c972" containerID="b89b92e77d90201a214aba6bcd0c627362ae486a88c6f4bf15d7f607fe9dc4ea" exitCode=0 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.320444 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerDied","Data":"b89b92e77d90201a214aba6bcd0c627362ae486a88c6f4bf15d7f607fe9dc4ea"} Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.320957 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt74g\" (UniqueName: \"kubernetes.io/projected/193bb790-d155-4aee-9f31-41b457c429ca-kube-api-access-lt74g\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.321012 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.321101 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.322973 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.330034 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerID="7ce38aa67a3ff40bb714b93a87b904a528ddd7d8fd21335bd27100616ffdcc02" exitCode=0 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.330129 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlrfc" event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerDied","Data":"7ce38aa67a3ff40bb714b93a87b904a528ddd7d8fd21335bd27100616ffdcc02"} Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.330486 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/193bb790-d155-4aee-9f31-41b457c429ca-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc 
kubenswrapper[4875]: I1007 08:01:02.339146 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt74g\" (UniqueName: \"kubernetes.io/projected/193bb790-d155-4aee-9f31-41b457c429ca-kube-api-access-lt74g\") pod \"marketplace-operator-79b997595-kjd46\" (UID: \"193bb790-d155-4aee-9f31-41b457c429ca\") " pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.341306 4875 generic.go:334] "Generic (PLEG): container finished" podID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerID="2cacab925991935d1a41004e16c433d5cd7d4bf75821219fd92f29b99b064c24" exitCode=0 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.341376 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerDied","Data":"2cacab925991935d1a41004e16c433d5cd7d4bf75821219fd92f29b99b064c24"} Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.343532 4875 generic.go:334] "Generic (PLEG): container finished" podID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerID="c752782e23754b4b0e11f4e7c85183900c60495b30945ab2879cc0541f704f29" exitCode=0 Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.343561 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" event={"ID":"83061376-ca6f-4ccc-8da0-bede4a497b4f","Type":"ContainerDied","Data":"c752782e23754b4b0e11f4e7c85183900c60495b30945ab2879cc0541f704f29"} Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.375729 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.502545 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.502668 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.517028 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.542573 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.592991 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626356 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content\") pod \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626414 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2xfw\" (UniqueName: \"kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw\") pod \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626442 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsp8f\" (UniqueName: \"kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f\") pod \"62c991b5-7e34-484b-8346-2db6f586c972\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626468 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2pvh\" (UniqueName: \"kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh\") pod \"83061376-ca6f-4ccc-8da0-bede4a497b4f\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626491 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdh6h\" (UniqueName: \"kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h\") pod \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626528 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities\") pod \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626559 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities\") pod \"62c991b5-7e34-484b-8346-2db6f586c972\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626587 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content\") pod \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\" (UID: \"09dd69c8-1a2a-40d8-8087-a8e895ba97ee\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626603 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities\") pod \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\" (UID: \"e2939977-2c4d-49a5-9aaf-a9ff78f7925f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626626 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") pod \"83061376-ca6f-4ccc-8da0-bede4a497b4f\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626648 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") pod \"83061376-ca6f-4ccc-8da0-bede4a497b4f\" (UID: \"83061376-ca6f-4ccc-8da0-bede4a497b4f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.626674 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content\") pod \"62c991b5-7e34-484b-8346-2db6f586c972\" (UID: \"62c991b5-7e34-484b-8346-2db6f586c972\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.627996 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities" (OuterVolumeSpecName: "utilities") pod "09dd69c8-1a2a-40d8-8087-a8e895ba97ee" (UID: "09dd69c8-1a2a-40d8-8087-a8e895ba97ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.628171 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities" (OuterVolumeSpecName: "utilities") pod "e2939977-2c4d-49a5-9aaf-a9ff78f7925f" (UID: "e2939977-2c4d-49a5-9aaf-a9ff78f7925f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.628764 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "83061376-ca6f-4ccc-8da0-bede4a497b4f" (UID: "83061376-ca6f-4ccc-8da0-bede4a497b4f"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.632937 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "83061376-ca6f-4ccc-8da0-bede4a497b4f" (UID: "83061376-ca6f-4ccc-8da0-bede4a497b4f"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.633098 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f" (OuterVolumeSpecName: "kube-api-access-nsp8f") pod "62c991b5-7e34-484b-8346-2db6f586c972" (UID: "62c991b5-7e34-484b-8346-2db6f586c972"). InnerVolumeSpecName "kube-api-access-nsp8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.633554 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw" (OuterVolumeSpecName: "kube-api-access-c2xfw") pod "e2939977-2c4d-49a5-9aaf-a9ff78f7925f" (UID: "e2939977-2c4d-49a5-9aaf-a9ff78f7925f"). 
InnerVolumeSpecName "kube-api-access-c2xfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.640159 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h" (OuterVolumeSpecName: "kube-api-access-sdh6h") pod "09dd69c8-1a2a-40d8-8087-a8e895ba97ee" (UID: "09dd69c8-1a2a-40d8-8087-a8e895ba97ee"). InnerVolumeSpecName "kube-api-access-sdh6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.643402 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities" (OuterVolumeSpecName: "utilities") pod "62c991b5-7e34-484b-8346-2db6f586c972" (UID: "62c991b5-7e34-484b-8346-2db6f586c972"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.644583 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh" (OuterVolumeSpecName: "kube-api-access-k2pvh") pod "83061376-ca6f-4ccc-8da0-bede4a497b4f" (UID: "83061376-ca6f-4ccc-8da0-bede4a497b4f"). InnerVolumeSpecName "kube-api-access-k2pvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.701493 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kjd46"] Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.716336 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62c991b5-7e34-484b-8346-2db6f586c972" (UID: "62c991b5-7e34-484b-8346-2db6f586c972"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.727950 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities\") pod \"f4b26103-9634-4ff7-b3bd-d3819c51643f\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728060 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content\") pod \"f4b26103-9634-4ff7-b3bd-d3819c51643f\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728173 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-956mx\" (UniqueName: \"kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx\") pod \"f4b26103-9634-4ff7-b3bd-d3819c51643f\" (UID: \"f4b26103-9634-4ff7-b3bd-d3819c51643f\") " Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728466 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728483 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728496 4875 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728508 4875 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83061376-ca6f-4ccc-8da0-bede4a497b4f-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728520 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62c991b5-7e34-484b-8346-2db6f586c972-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728530 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2xfw\" (UniqueName: \"kubernetes.io/projected/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-kube-api-access-c2xfw\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728541 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsp8f\" (UniqueName: \"kubernetes.io/projected/62c991b5-7e34-484b-8346-2db6f586c972-kube-api-access-nsp8f\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728550 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2pvh\" (UniqueName: \"kubernetes.io/projected/83061376-ca6f-4ccc-8da0-bede4a497b4f-kube-api-access-k2pvh\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728560 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdh6h\" (UniqueName: 
\"kubernetes.io/projected/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-kube-api-access-sdh6h\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.728569 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.729832 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities" (OuterVolumeSpecName: "utilities") pod "f4b26103-9634-4ff7-b3bd-d3819c51643f" (UID: "f4b26103-9634-4ff7-b3bd-d3819c51643f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.730571 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09dd69c8-1a2a-40d8-8087-a8e895ba97ee" (UID: "09dd69c8-1a2a-40d8-8087-a8e895ba97ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.739918 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx" (OuterVolumeSpecName: "kube-api-access-956mx") pod "f4b26103-9634-4ff7-b3bd-d3819c51643f" (UID: "f4b26103-9634-4ff7-b3bd-d3819c51643f"). InnerVolumeSpecName "kube-api-access-956mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.747447 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4b26103-9634-4ff7-b3bd-d3819c51643f" (UID: "f4b26103-9634-4ff7-b3bd-d3819c51643f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.769271 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2939977-2c4d-49a5-9aaf-a9ff78f7925f" (UID: "e2939977-2c4d-49a5-9aaf-a9ff78f7925f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.829458 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.830043 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b26103-9634-4ff7-b3bd-d3819c51643f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.830058 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2939977-2c4d-49a5-9aaf-a9ff78f7925f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.830069 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-956mx\" (UniqueName: \"kubernetes.io/projected/f4b26103-9634-4ff7-b3bd-d3819c51643f-kube-api-access-956mx\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:02 crc kubenswrapper[4875]: I1007 08:01:02.830083 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dd69c8-1a2a-40d8-8087-a8e895ba97ee-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.351509 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-htjvv" event={"ID":"62c991b5-7e34-484b-8346-2db6f586c972","Type":"ContainerDied","Data":"9e4e9dfca91935a3002f398d89fa237ca85f46355d7aa924ec137f8e2001078b"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.351596 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-htjvv" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.353006 4875 scope.go:117] "RemoveContainer" containerID="b89b92e77d90201a214aba6bcd0c627362ae486a88c6f4bf15d7f607fe9dc4ea" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.355548 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlrfc" event={"ID":"e2939977-2c4d-49a5-9aaf-a9ff78f7925f","Type":"ContainerDied","Data":"9ba5bd8617351748d0509eeaa880771801903dd9e72b49382c08b94087ee6222"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.355794 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mlrfc" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.358588 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" event={"ID":"193bb790-d155-4aee-9f31-41b457c429ca","Type":"ContainerStarted","Data":"5ac2a85d3f8850f8f26a0c513fe4d0fee2086447b3df9f1211e476046a317d84"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.358635 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" event={"ID":"193bb790-d155-4aee-9f31-41b457c429ca","Type":"ContainerStarted","Data":"44857d0953264cac63ad8634d7ad100193edffee433c5b9a9228e8819cfd54a6"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.359100 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.363331 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kwkt" event={"ID":"f4b26103-9634-4ff7-b3bd-d3819c51643f","Type":"ContainerDied","Data":"3ded4d63236df673038ce0e74497fa1d29d5e4f4f51f410eb4a4505170972902"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.363416 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kwkt" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.364794 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" event={"ID":"83061376-ca6f-4ccc-8da0-bede4a497b4f","Type":"ContainerDied","Data":"f5ee1f28e21c4fb7581f67be934a729d904b0d956e1269307a084fce4ee66b4c"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.364838 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j9tjw" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.365745 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.374791 4875 scope.go:117] "RemoveContainer" containerID="430ee1689a2d2a4bcd8f6dfed404dd5bf5066cae9fa53ddfa847d17bc3f70684" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.380975 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bqwr8" event={"ID":"09dd69c8-1a2a-40d8-8087-a8e895ba97ee","Type":"ContainerDied","Data":"a2af927e01a51e2aa491a1f46157152c369daf0372cd852ee164d0171d15985b"} Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.380999 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bqwr8" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.389208 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kjd46" podStartSLOduration=1.38918468 podStartE2EDuration="1.38918468s" podCreationTimestamp="2025-10-07 08:01:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:01:03.385499654 +0000 UTC m=+288.345270217" watchObservedRunningTime="2025-10-07 08:01:03.38918468 +0000 UTC m=+288.348955223" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.417421 4875 scope.go:117] "RemoveContainer" containerID="506685b334cc3689a22fb550d5fc22f85d86835cdb2b5a7475eee7b1df4cfb01" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.433652 4875 scope.go:117] "RemoveContainer" containerID="7ce38aa67a3ff40bb714b93a87b904a528ddd7d8fd21335bd27100616ffdcc02" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.467062 4875 scope.go:117] "RemoveContainer" containerID="fc7761140bcb281339461b717b9bc5fafff95d3c3ff28a1ced4152b2c6253a12" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.467925 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.473906 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-htjvv"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.488732 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.490132 4875 scope.go:117] "RemoveContainer" containerID="6f795231dcfee118da6fe7afa9dcac0a77f8b35329ae8f30997dcac7c0a4af62" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.492380 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mlrfc"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.507274 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.508381 4875 scope.go:117] "RemoveContainer" containerID="2cacab925991935d1a41004e16c433d5cd7d4bf75821219fd92f29b99b064c24" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.516054 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j9tjw"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.521195 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.529640 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bqwr8"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.532074 4875 scope.go:117] "RemoveContainer" containerID="0962e76645e67f318ae108ec1c8e370c0f034884ac49caa993f6b5fe0693e74e" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.534539 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.541067 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kwkt"] Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.550936 4875 
scope.go:117] "RemoveContainer" containerID="174491dcc2d08bdf4e4811579de940579248e98041de12c366f64c3b19d7b412" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.569040 4875 scope.go:117] "RemoveContainer" containerID="c752782e23754b4b0e11f4e7c85183900c60495b30945ab2879cc0541f704f29" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.582858 4875 scope.go:117] "RemoveContainer" containerID="246c2eaf5329404e537edac66be50592c0e3ac1b6397f5bfe78a2540b7e7c8d5" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.597080 4875 scope.go:117] "RemoveContainer" containerID="86f23fbd1a933f0638a0616ab3c8a72fea0722d61cd101b7f1dcbe7f4d2be039" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.612134 4875 scope.go:117] "RemoveContainer" containerID="d8583d7e078b9b81c3cc8e7b27a601effee4e42c1e14a0f3ff83e156dd4d8a51" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.704701 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" path="/var/lib/kubelet/pods/09dd69c8-1a2a-40d8-8087-a8e895ba97ee/volumes" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.705714 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c991b5-7e34-484b-8346-2db6f586c972" path="/var/lib/kubelet/pods/62c991b5-7e34-484b-8346-2db6f586c972/volumes" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.706409 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" path="/var/lib/kubelet/pods/83061376-ca6f-4ccc-8da0-bede4a497b4f/volumes" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.710394 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" path="/var/lib/kubelet/pods/e2939977-2c4d-49a5-9aaf-a9ff78f7925f/volumes" Oct 07 08:01:03 crc kubenswrapper[4875]: I1007 08:01:03.712201 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" path="/var/lib/kubelet/pods/f4b26103-9634-4ff7-b3bd-d3819c51643f/volumes" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235535 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ndzbj"] Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235823 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235841 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235854 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235859 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235869 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerName="marketplace-operator" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235895 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerName="marketplace-operator" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235903 4875 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235911 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235917 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235923 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235929 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235935 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235944 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235950 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235959 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235965 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="extract-content" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235976 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235983 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.235992 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.235998 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.236006 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236011 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.236019 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236026 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="extract-utilities" Oct 07 08:01:04 crc kubenswrapper[4875]: E1007 08:01:04.236032 4875 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236037 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236123 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="83061376-ca6f-4ccc-8da0-bede4a497b4f" containerName="marketplace-operator" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236135 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2939977-2c4d-49a5-9aaf-a9ff78f7925f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236145 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c991b5-7e34-484b-8346-2db6f586c972" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236151 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="09dd69c8-1a2a-40d8-8087-a8e895ba97ee" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.236159 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b26103-9634-4ff7-b3bd-d3819c51643f" containerName="registry-server" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.238799 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.242379 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.245256 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ndzbj"] Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.357334 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-catalog-content\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.357870 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-utilities\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.357920 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dtqs\" (UniqueName: \"kubernetes.io/projected/a0195b31-f7a7-4146-8e5c-a11661371eda-kube-api-access-7dtqs\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.435759 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9fmxk"] Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.437852 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.445477 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fmxk"] Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.447203 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.459942 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-utilities\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.459987 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dtqs\" (UniqueName: \"kubernetes.io/projected/a0195b31-f7a7-4146-8e5c-a11661371eda-kube-api-access-7dtqs\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.460056 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-catalog-content\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.460520 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-catalog-content\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.460789 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0195b31-f7a7-4146-8e5c-a11661371eda-utilities\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.491141 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dtqs\" (UniqueName: \"kubernetes.io/projected/a0195b31-f7a7-4146-8e5c-a11661371eda-kube-api-access-7dtqs\") pod \"certified-operators-ndzbj\" (UID: \"a0195b31-f7a7-4146-8e5c-a11661371eda\") " pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.560997 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-utilities\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.561255 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-catalog-content\") pod \"community-operators-9fmxk\" (UID: 
\"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.561440 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktx2n\" (UniqueName: \"kubernetes.io/projected/a5460abe-f279-4a96-bb7b-6389750640bb-kube-api-access-ktx2n\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.569972 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.663123 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktx2n\" (UniqueName: \"kubernetes.io/projected/a5460abe-f279-4a96-bb7b-6389750640bb-kube-api-access-ktx2n\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.663198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-utilities\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.663237 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-catalog-content\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.663726 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-catalog-content\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.663849 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5460abe-f279-4a96-bb7b-6389750640bb-utilities\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.694569 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktx2n\" (UniqueName: \"kubernetes.io/projected/a5460abe-f279-4a96-bb7b-6389750640bb-kube-api-access-ktx2n\") pod \"community-operators-9fmxk\" (UID: \"a5460abe-f279-4a96-bb7b-6389750640bb\") " pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.773374 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:04 crc kubenswrapper[4875]: I1007 08:01:04.972016 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fmxk"] Oct 07 08:01:04 crc kubenswrapper[4875]: W1007 08:01:04.984227 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5460abe_f279_4a96_bb7b_6389750640bb.slice/crio-cfb1a1438b3023d85f04efb34ee508647b78f515a843f86b15459611017ee09e WatchSource:0}: Error finding container cfb1a1438b3023d85f04efb34ee508647b78f515a843f86b15459611017ee09e: Status 404 returned error can't find the container with id cfb1a1438b3023d85f04efb34ee508647b78f515a843f86b15459611017ee09e Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.001575 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ndzbj"] Oct 07 08:01:05 crc kubenswrapper[4875]: W1007 08:01:05.010808 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0195b31_f7a7_4146_8e5c_a11661371eda.slice/crio-8756ca28f4ff11d5b740de933331bbb4381b7b8ca7bb500cc467e467c93ee65e WatchSource:0}: Error finding container 8756ca28f4ff11d5b740de933331bbb4381b7b8ca7bb500cc467e467c93ee65e: Status 404 returned error can't find the container with id 8756ca28f4ff11d5b740de933331bbb4381b7b8ca7bb500cc467e467c93ee65e Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.405484 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5460abe-f279-4a96-bb7b-6389750640bb" containerID="2b68b97951d2001ddcb6d23b61405bda80840068a0516d25c48438f24eb373ac" exitCode=0 Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.405604 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmxk" event={"ID":"a5460abe-f279-4a96-bb7b-6389750640bb","Type":"ContainerDied","Data":"2b68b97951d2001ddcb6d23b61405bda80840068a0516d25c48438f24eb373ac"} Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.405640 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmxk" event={"ID":"a5460abe-f279-4a96-bb7b-6389750640bb","Type":"ContainerStarted","Data":"cfb1a1438b3023d85f04efb34ee508647b78f515a843f86b15459611017ee09e"} Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.408107 4875 generic.go:334] "Generic (PLEG): container finished" podID="a0195b31-f7a7-4146-8e5c-a11661371eda" containerID="3c2724069aae1bad86722a6b2fc616be323cfbb503277ef493098947828a3252" exitCode=0 Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.408502 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ndzbj" event={"ID":"a0195b31-f7a7-4146-8e5c-a11661371eda","Type":"ContainerDied","Data":"3c2724069aae1bad86722a6b2fc616be323cfbb503277ef493098947828a3252"} Oct 07 08:01:05 crc kubenswrapper[4875]: I1007 08:01:05.408821 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ndzbj" event={"ID":"a0195b31-f7a7-4146-8e5c-a11661371eda","Type":"ContainerStarted","Data":"8756ca28f4ff11d5b740de933331bbb4381b7b8ca7bb500cc467e467c93ee65e"} Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.638933 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jbrrn"] Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.640394 4875 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.642524 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.656515 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jbrrn"] Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.791068 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-catalog-content\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.791202 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-utilities\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.791221 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2hgg\" (UniqueName: \"kubernetes.io/projected/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-kube-api-access-d2hgg\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.834927 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.836079 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.842036 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.847901 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.892490 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-catalog-content\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.892782 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-utilities\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.892945 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2hgg\" (UniqueName: \"kubernetes.io/projected/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-kube-api-access-d2hgg\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.892945 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-catalog-content\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.893363 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-utilities\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.919619 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2hgg\" (UniqueName: \"kubernetes.io/projected/bcc94b2c-ef4b-4a9b-ba01-17f23df3b946-kube-api-access-d2hgg\") pod \"redhat-marketplace-jbrrn\" (UID: \"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946\") " pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:06 crc kubenswrapper[4875]: I1007 08:01:06.953742 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.011611 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrvhg\" (UniqueName: \"kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.011692 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.011719 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.113247 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrvhg\" (UniqueName: \"kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.113770 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.113824 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.115089 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.115312 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content\") pod \"redhat-operators-gfc5q\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.139426 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrvhg\" (UniqueName: \"kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg\") pod \"redhat-operators-gfc5q\" (UID: 
\"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.183184 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jbrrn"] Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.211610 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.421341 4875 generic.go:334] "Generic (PLEG): container finished" podID="bcc94b2c-ef4b-4a9b-ba01-17f23df3b946" containerID="937731113be319925a478bce44e27e7c503605bfd043f5254d26a975046eb2ef" exitCode=0 Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.421553 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jbrrn" event={"ID":"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946","Type":"ContainerDied","Data":"937731113be319925a478bce44e27e7c503605bfd043f5254d26a975046eb2ef"} Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.421795 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jbrrn" event={"ID":"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946","Type":"ContainerStarted","Data":"1da8db5231dd62d1c6161b4748fbee33eb255f933af30403c2841e99dc978f83"} Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.425590 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5460abe-f279-4a96-bb7b-6389750640bb" containerID="45c544596f1f5e7bfc3fa8dd3ad7558a50bfd8c767cea08cef464e2ff45fc252" exitCode=0 Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.425658 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmxk" event={"ID":"a5460abe-f279-4a96-bb7b-6389750640bb","Type":"ContainerDied","Data":"45c544596f1f5e7bfc3fa8dd3ad7558a50bfd8c767cea08cef464e2ff45fc252"} Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.429641 4875 generic.go:334] "Generic (PLEG): container finished" podID="a0195b31-f7a7-4146-8e5c-a11661371eda" containerID="e9bb0b2708bbb78283b125578ec225bbb229e8f265443aaeaae86d129294b52d" exitCode=0 Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.429666 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ndzbj" event={"ID":"a0195b31-f7a7-4146-8e5c-a11661371eda","Type":"ContainerDied","Data":"e9bb0b2708bbb78283b125578ec225bbb229e8f265443aaeaae86d129294b52d"} Oct 07 08:01:07 crc kubenswrapper[4875]: I1007 08:01:07.471237 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:01:07 crc kubenswrapper[4875]: W1007 08:01:07.477101 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29a4e16e_a79d_4dd1_b06c_eeb70e66e974.slice/crio-d53987b1eb04258d6bca55d6243ba4b4254a72937be2e4cb1d3ae65d245353e1 WatchSource:0}: Error finding container d53987b1eb04258d6bca55d6243ba4b4254a72937be2e4cb1d3ae65d245353e1: Status 404 returned error can't find the container with id d53987b1eb04258d6bca55d6243ba4b4254a72937be2e4cb1d3ae65d245353e1 Oct 07 08:01:08 crc kubenswrapper[4875]: I1007 08:01:08.436246 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ndzbj" event={"ID":"a0195b31-f7a7-4146-8e5c-a11661371eda","Type":"ContainerStarted","Data":"196ad9db78bc7a0798f79dca9b9aa62a16b620964161fc5666de907dfd394965"} Oct 07 
08:01:08 crc kubenswrapper[4875]: I1007 08:01:08.437573 4875 generic.go:334] "Generic (PLEG): container finished" podID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerID="189510f1aa650b3f41bbeaa53f0b65205ef00e48092e11596b550e3df47fa505" exitCode=0 Oct 07 08:01:08 crc kubenswrapper[4875]: I1007 08:01:08.437630 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerDied","Data":"189510f1aa650b3f41bbeaa53f0b65205ef00e48092e11596b550e3df47fa505"} Oct 07 08:01:08 crc kubenswrapper[4875]: I1007 08:01:08.437683 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerStarted","Data":"d53987b1eb04258d6bca55d6243ba4b4254a72937be2e4cb1d3ae65d245353e1"} Oct 07 08:01:08 crc kubenswrapper[4875]: I1007 08:01:08.454417 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ndzbj" podStartSLOduration=1.973335934 podStartE2EDuration="4.454398233s" podCreationTimestamp="2025-10-07 08:01:04 +0000 UTC" firstStartedPulling="2025-10-07 08:01:05.410472986 +0000 UTC m=+290.370243529" lastFinishedPulling="2025-10-07 08:01:07.891535285 +0000 UTC m=+292.851305828" observedRunningTime="2025-10-07 08:01:08.453264971 +0000 UTC m=+293.413035544" watchObservedRunningTime="2025-10-07 08:01:08.454398233 +0000 UTC m=+293.414168776" Oct 07 08:01:09 crc kubenswrapper[4875]: I1007 08:01:09.446055 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerStarted","Data":"96325bde7d8a78e41273569c3e77aff734d12e4df4bc0186e33b7fbd16dc9d53"} Oct 07 08:01:09 crc kubenswrapper[4875]: I1007 08:01:09.447680 4875 generic.go:334] "Generic (PLEG): container finished" podID="bcc94b2c-ef4b-4a9b-ba01-17f23df3b946" containerID="1479d2d91818cd09f8473cb4a5308b7f413f34eb81361553183bcb40fc71538a" exitCode=0 Oct 07 08:01:09 crc kubenswrapper[4875]: I1007 08:01:09.447749 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jbrrn" event={"ID":"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946","Type":"ContainerDied","Data":"1479d2d91818cd09f8473cb4a5308b7f413f34eb81361553183bcb40fc71538a"} Oct 07 08:01:09 crc kubenswrapper[4875]: I1007 08:01:09.453305 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmxk" event={"ID":"a5460abe-f279-4a96-bb7b-6389750640bb","Type":"ContainerStarted","Data":"642a7e2af4244c8ce1d396cf297e3023b4498bc60656ed714a616adee990130c"} Oct 07 08:01:09 crc kubenswrapper[4875]: I1007 08:01:09.498413 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9fmxk" podStartSLOduration=2.396721109 podStartE2EDuration="5.498394464s" podCreationTimestamp="2025-10-07 08:01:04 +0000 UTC" firstStartedPulling="2025-10-07 08:01:05.407652004 +0000 UTC m=+290.367422547" lastFinishedPulling="2025-10-07 08:01:08.509325319 +0000 UTC m=+293.469095902" observedRunningTime="2025-10-07 08:01:09.497714084 +0000 UTC m=+294.457484637" watchObservedRunningTime="2025-10-07 08:01:09.498394464 +0000 UTC m=+294.458165007" Oct 07 08:01:10 crc kubenswrapper[4875]: I1007 08:01:10.460694 4875 generic.go:334] "Generic (PLEG): container finished" podID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" 
containerID="96325bde7d8a78e41273569c3e77aff734d12e4df4bc0186e33b7fbd16dc9d53" exitCode=0 Oct 07 08:01:10 crc kubenswrapper[4875]: I1007 08:01:10.460778 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerDied","Data":"96325bde7d8a78e41273569c3e77aff734d12e4df4bc0186e33b7fbd16dc9d53"} Oct 07 08:01:11 crc kubenswrapper[4875]: I1007 08:01:11.470058 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerStarted","Data":"90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2"} Oct 07 08:01:11 crc kubenswrapper[4875]: I1007 08:01:11.472925 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jbrrn" event={"ID":"bcc94b2c-ef4b-4a9b-ba01-17f23df3b946","Type":"ContainerStarted","Data":"3c85483ebb2cfec54be0a33575b2cf6e5162856463588581f79e6d624004b062"} Oct 07 08:01:11 crc kubenswrapper[4875]: I1007 08:01:11.488768 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gfc5q" podStartSLOduration=3.05409213 podStartE2EDuration="5.488752222s" podCreationTimestamp="2025-10-07 08:01:06 +0000 UTC" firstStartedPulling="2025-10-07 08:01:08.453992601 +0000 UTC m=+293.413763144" lastFinishedPulling="2025-10-07 08:01:10.888652693 +0000 UTC m=+295.848423236" observedRunningTime="2025-10-07 08:01:11.488399231 +0000 UTC m=+296.448169784" watchObservedRunningTime="2025-10-07 08:01:11.488752222 +0000 UTC m=+296.448522765" Oct 07 08:01:11 crc kubenswrapper[4875]: I1007 08:01:11.510937 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jbrrn" podStartSLOduration=3.078164929 podStartE2EDuration="5.510858874s" podCreationTimestamp="2025-10-07 08:01:06 +0000 UTC" firstStartedPulling="2025-10-07 08:01:07.423417209 +0000 UTC m=+292.383187752" lastFinishedPulling="2025-10-07 08:01:09.856111154 +0000 UTC m=+294.815881697" observedRunningTime="2025-10-07 08:01:11.510036301 +0000 UTC m=+296.469806844" watchObservedRunningTime="2025-10-07 08:01:11.510858874 +0000 UTC m=+296.470629417" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.571129 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.571552 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.614162 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.775037 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.775143 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:14 crc kubenswrapper[4875]: I1007 08:01:14.826394 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:15 crc kubenswrapper[4875]: I1007 08:01:15.533753 4875 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9fmxk" Oct 07 08:01:15 crc kubenswrapper[4875]: I1007 08:01:15.536311 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ndzbj" Oct 07 08:01:16 crc kubenswrapper[4875]: I1007 08:01:16.954241 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:16 crc kubenswrapper[4875]: I1007 08:01:16.955495 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:16 crc kubenswrapper[4875]: I1007 08:01:16.997451 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:17 crc kubenswrapper[4875]: I1007 08:01:17.211783 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:17 crc kubenswrapper[4875]: I1007 08:01:17.211842 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:17 crc kubenswrapper[4875]: I1007 08:01:17.251713 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:01:17 crc kubenswrapper[4875]: I1007 08:01:17.587310 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jbrrn" Oct 07 08:01:17 crc kubenswrapper[4875]: I1007 08:01:17.588490 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:02:01 crc kubenswrapper[4875]: I1007 08:02:01.221299 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:02:01 crc kubenswrapper[4875]: I1007 08:02:01.222019 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:02:31 crc kubenswrapper[4875]: I1007 08:02:31.221318 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:02:31 crc kubenswrapper[4875]: I1007 08:02:31.222188 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:02:58 crc kubenswrapper[4875]: I1007 08:02:58.997432 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-nzgsc"] Oct 07 08:02:58 crc kubenswrapper[4875]: I1007 08:02:58.999133 4875 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.017844 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-nzgsc"] Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.132633 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-bound-sa-token\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.132717 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfkc7\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-kube-api-access-xfkc7\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.132940 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.133094 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-certificates\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.133137 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0abe5722-1183-4128-8e1c-d8d348c04e77-ca-trust-extracted\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.133213 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-tls\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.133252 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-trusted-ca\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.133377 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/0abe5722-1183-4128-8e1c-d8d348c04e77-installation-pull-secrets\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.155625 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235114 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-bound-sa-token\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235181 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfkc7\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-kube-api-access-xfkc7\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235244 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-certificates\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235268 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0abe5722-1183-4128-8e1c-d8d348c04e77-ca-trust-extracted\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235310 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-tls\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235331 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-trusted-ca\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.235370 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0abe5722-1183-4128-8e1c-d8d348c04e77-installation-pull-secrets\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.236273 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0abe5722-1183-4128-8e1c-d8d348c04e77-ca-trust-extracted\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.237952 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-trusted-ca\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.238182 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-certificates\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.244280 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-registry-tls\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.248441 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0abe5722-1183-4128-8e1c-d8d348c04e77-installation-pull-secrets\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.257973 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfkc7\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-kube-api-access-xfkc7\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.260083 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0abe5722-1183-4128-8e1c-d8d348c04e77-bound-sa-token\") pod \"image-registry-66df7c8f76-nzgsc\" (UID: \"0abe5722-1183-4128-8e1c-d8d348c04e77\") " pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.318708 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:02:59 crc kubenswrapper[4875]: I1007 08:02:59.774056 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-nzgsc"] Oct 07 08:03:00 crc kubenswrapper[4875]: I1007 08:03:00.151234 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" event={"ID":"0abe5722-1183-4128-8e1c-d8d348c04e77","Type":"ContainerStarted","Data":"a4ad51faa9b73218ed198aa9930765595f1133bd6276b6a2ab158548a25077f8"} Oct 07 08:03:00 crc kubenswrapper[4875]: I1007 08:03:00.151693 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:03:00 crc kubenswrapper[4875]: I1007 08:03:00.151704 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" event={"ID":"0abe5722-1183-4128-8e1c-d8d348c04e77","Type":"ContainerStarted","Data":"5bf918a66a8038118d9a36c4811411108f620acc8ad239dac1dc47b712bf55a2"} Oct 07 08:03:00 crc kubenswrapper[4875]: I1007 08:03:00.175473 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" podStartSLOduration=2.175452562 podStartE2EDuration="2.175452562s" podCreationTimestamp="2025-10-07 08:02:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:03:00.174805719 +0000 UTC m=+405.134576262" watchObservedRunningTime="2025-10-07 08:03:00.175452562 +0000 UTC m=+405.135223105" Oct 07 08:03:01 crc kubenswrapper[4875]: I1007 08:03:01.221193 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:03:01 crc kubenswrapper[4875]: I1007 08:03:01.221570 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:03:01 crc kubenswrapper[4875]: I1007 08:03:01.221633 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:03:01 crc kubenswrapper[4875]: I1007 08:03:01.222436 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:03:01 crc kubenswrapper[4875]: I1007 08:03:01.222530 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f" gracePeriod=600 Oct 07 08:03:02 crc kubenswrapper[4875]: I1007 
08:03:02.169656 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f" exitCode=0 Oct 07 08:03:02 crc kubenswrapper[4875]: I1007 08:03:02.169768 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f"} Oct 07 08:03:02 crc kubenswrapper[4875]: I1007 08:03:02.170589 4875 scope.go:117] "RemoveContainer" containerID="34b235fca1c383e868641a953bb908901bcd10528ee9fb668c1f71eb6e0e978a" Oct 07 08:03:02 crc kubenswrapper[4875]: I1007 08:03:02.172164 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148"} Oct 07 08:03:19 crc kubenswrapper[4875]: I1007 08:03:19.324153 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-nzgsc" Oct 07 08:03:19 crc kubenswrapper[4875]: I1007 08:03:19.382949 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.424794 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" podUID="2fcc08ca-5985-4132-a071-6821ef40cc5f" containerName="registry" containerID="cri-o://a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec" gracePeriod=30 Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.845259 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978135 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978275 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk65d\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978536 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978632 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978698 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.978775 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.979164 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.980156 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.980168 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.981278 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets\") pod \"2fcc08ca-5985-4132-a071-6821ef40cc5f\" (UID: \"2fcc08ca-5985-4132-a071-6821ef40cc5f\") " Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.982152 4875 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.982770 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fcc08ca-5985-4132-a071-6821ef40cc5f-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.987470 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d" (OuterVolumeSpecName: "kube-api-access-vk65d") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "kube-api-access-vk65d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.988175 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.993126 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.993186 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.993450 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 08:03:44 crc kubenswrapper[4875]: I1007 08:03:44.996375 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2fcc08ca-5985-4132-a071-6821ef40cc5f" (UID: "2fcc08ca-5985-4132-a071-6821ef40cc5f"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.084608 4875 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.084648 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk65d\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-kube-api-access-vk65d\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.084665 4875 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fcc08ca-5985-4132-a071-6821ef40cc5f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.084674 4875 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fcc08ca-5985-4132-a071-6821ef40cc5f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.084685 4875 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fcc08ca-5985-4132-a071-6821ef40cc5f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.484554 4875 generic.go:334] "Generic (PLEG): container finished" podID="2fcc08ca-5985-4132-a071-6821ef40cc5f" containerID="a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec" exitCode=0 Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.484659 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" event={"ID":"2fcc08ca-5985-4132-a071-6821ef40cc5f","Type":"ContainerDied","Data":"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec"} Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.484732 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" event={"ID":"2fcc08ca-5985-4132-a071-6821ef40cc5f","Type":"ContainerDied","Data":"29f1384caa019917af64256eb8b174d2a9dc3cabdbb1eb9f3acf443be3b7c926"} Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.484798 4875 scope.go:117] "RemoveContainer" containerID="a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.485139 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b5q5p" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.515251 4875 scope.go:117] "RemoveContainer" containerID="a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec" Oct 07 08:03:45 crc kubenswrapper[4875]: E1007 08:03:45.516021 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec\": container with ID starting with a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec not found: ID does not exist" containerID="a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.516069 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec"} err="failed to get container status \"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec\": rpc error: code = NotFound desc = could not find container \"a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec\": container with ID starting with a2a01886f625e9edc5928ea6b2fc9e9fc948790e5273dce888deec7d811d29ec not found: ID does not exist" Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.535865 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.539750 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b5q5p"] Oct 07 08:03:45 crc kubenswrapper[4875]: I1007 08:03:45.707450 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fcc08ca-5985-4132-a071-6821ef40cc5f" path="/var/lib/kubelet/pods/2fcc08ca-5985-4132-a071-6821ef40cc5f/volumes" Oct 07 08:05:01 crc kubenswrapper[4875]: I1007 08:05:01.221259 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:05:01 crc kubenswrapper[4875]: I1007 08:05:01.222006 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:05:31 crc kubenswrapper[4875]: I1007 08:05:31.221859 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:05:31 crc kubenswrapper[4875]: I1007 08:05:31.222904 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:06:01 crc kubenswrapper[4875]: I1007 08:06:01.221504 4875 patch_prober.go:28] interesting 
pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:06:01 crc kubenswrapper[4875]: I1007 08:06:01.222437 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:06:01 crc kubenswrapper[4875]: I1007 08:06:01.222512 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:06:01 crc kubenswrapper[4875]: I1007 08:06:01.223755 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:06:01 crc kubenswrapper[4875]: I1007 08:06:01.223932 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148" gracePeriod=600 Oct 07 08:06:02 crc kubenswrapper[4875]: I1007 08:06:02.317395 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148" exitCode=0 Oct 07 08:06:02 crc kubenswrapper[4875]: I1007 08:06:02.317505 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148"} Oct 07 08:06:02 crc kubenswrapper[4875]: I1007 08:06:02.318592 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524"} Oct 07 08:06:02 crc kubenswrapper[4875]: I1007 08:06:02.318638 4875 scope.go:117] "RemoveContainer" containerID="b8c543d0eadeb6dfce7faef1c8e428b6173e0354739b00fae2ae9968ce2f005f" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.751232 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wgr7v"] Oct 07 08:06:06 crc kubenswrapper[4875]: E1007 08:06:06.753085 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcc08ca-5985-4132-a071-6821ef40cc5f" containerName="registry" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.753165 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcc08ca-5985-4132-a071-6821ef40cc5f" containerName="registry" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.753319 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fcc08ca-5985-4132-a071-6821ef40cc5f" containerName="registry" Oct 07 
08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.753790 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.754761 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4th7n"] Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.755358 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4th7n" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.757920 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.758249 4875 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-bx9wf" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.758980 4875 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-zf8np" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.760038 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.768090 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wgr7v"] Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.795280 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4th7n"] Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.801777 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-rpz2k"] Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.804118 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.808342 4875 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-f7lj6" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.826001 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-rpz2k"] Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.831852 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsx8l\" (UniqueName: \"kubernetes.io/projected/3d10ad9b-14eb-4da6-a7b9-8ca414305d45-kube-api-access-lsx8l\") pod \"cert-manager-cainjector-7f985d654d-wgr7v\" (UID: \"3d10ad9b-14eb-4da6-a7b9-8ca414305d45\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.831929 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-458qr\" (UniqueName: \"kubernetes.io/projected/3876b303-4b50-4e7f-afbc-fad7b7196009-kube-api-access-458qr\") pod \"cert-manager-webhook-5655c58dd6-rpz2k\" (UID: \"3876b303-4b50-4e7f-afbc-fad7b7196009\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.831965 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mtxd\" (UniqueName: \"kubernetes.io/projected/34bd548d-264b-4fca-8e6e-153a3309bc28-kube-api-access-7mtxd\") pod \"cert-manager-5b446d88c5-4th7n\" (UID: \"34bd548d-264b-4fca-8e6e-153a3309bc28\") " pod="cert-manager/cert-manager-5b446d88c5-4th7n" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.932339 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsx8l\" (UniqueName: \"kubernetes.io/projected/3d10ad9b-14eb-4da6-a7b9-8ca414305d45-kube-api-access-lsx8l\") pod \"cert-manager-cainjector-7f985d654d-wgr7v\" (UID: \"3d10ad9b-14eb-4da6-a7b9-8ca414305d45\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.932388 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-458qr\" (UniqueName: \"kubernetes.io/projected/3876b303-4b50-4e7f-afbc-fad7b7196009-kube-api-access-458qr\") pod \"cert-manager-webhook-5655c58dd6-rpz2k\" (UID: \"3876b303-4b50-4e7f-afbc-fad7b7196009\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.932417 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mtxd\" (UniqueName: \"kubernetes.io/projected/34bd548d-264b-4fca-8e6e-153a3309bc28-kube-api-access-7mtxd\") pod \"cert-manager-5b446d88c5-4th7n\" (UID: \"34bd548d-264b-4fca-8e6e-153a3309bc28\") " pod="cert-manager/cert-manager-5b446d88c5-4th7n" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.954590 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsx8l\" (UniqueName: \"kubernetes.io/projected/3d10ad9b-14eb-4da6-a7b9-8ca414305d45-kube-api-access-lsx8l\") pod \"cert-manager-cainjector-7f985d654d-wgr7v\" (UID: \"3d10ad9b-14eb-4da6-a7b9-8ca414305d45\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.962042 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-458qr\" (UniqueName: \"kubernetes.io/projected/3876b303-4b50-4e7f-afbc-fad7b7196009-kube-api-access-458qr\") pod \"cert-manager-webhook-5655c58dd6-rpz2k\" (UID: \"3876b303-4b50-4e7f-afbc-fad7b7196009\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:06 crc kubenswrapper[4875]: I1007 08:06:06.967194 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mtxd\" (UniqueName: \"kubernetes.io/projected/34bd548d-264b-4fca-8e6e-153a3309bc28-kube-api-access-7mtxd\") pod \"cert-manager-5b446d88c5-4th7n\" (UID: \"34bd548d-264b-4fca-8e6e-153a3309bc28\") " pod="cert-manager/cert-manager-5b446d88c5-4th7n" Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.084562 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.117315 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4th7n" Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.137281 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.346955 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4th7n"] Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.359614 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.389637 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wgr7v"] Oct 07 08:06:07 crc kubenswrapper[4875]: W1007 08:06:07.397752 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d10ad9b_14eb_4da6_a7b9_8ca414305d45.slice/crio-2e9669c835676088dc44d5d6ea3b6c823bc1ef9ad694546585761d2b379b1991 WatchSource:0}: Error finding container 2e9669c835676088dc44d5d6ea3b6c823bc1ef9ad694546585761d2b379b1991: Status 404 returned error can't find the container with id 2e9669c835676088dc44d5d6ea3b6c823bc1ef9ad694546585761d2b379b1991 Oct 07 08:06:07 crc kubenswrapper[4875]: I1007 08:06:07.408048 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-rpz2k"] Oct 07 08:06:07 crc kubenswrapper[4875]: W1007 08:06:07.416241 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3876b303_4b50_4e7f_afbc_fad7b7196009.slice/crio-f593c7e5beb713df4e903f1caf72028bbfaaa442cf00c02aef0b2554456d681f WatchSource:0}: Error finding container f593c7e5beb713df4e903f1caf72028bbfaaa442cf00c02aef0b2554456d681f: Status 404 returned error can't find the container with id f593c7e5beb713df4e903f1caf72028bbfaaa442cf00c02aef0b2554456d681f Oct 07 08:06:08 crc kubenswrapper[4875]: I1007 08:06:08.361319 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" event={"ID":"3d10ad9b-14eb-4da6-a7b9-8ca414305d45","Type":"ContainerStarted","Data":"2e9669c835676088dc44d5d6ea3b6c823bc1ef9ad694546585761d2b379b1991"} Oct 07 08:06:08 crc kubenswrapper[4875]: I1007 08:06:08.363465 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" event={"ID":"3876b303-4b50-4e7f-afbc-fad7b7196009","Type":"ContainerStarted","Data":"f593c7e5beb713df4e903f1caf72028bbfaaa442cf00c02aef0b2554456d681f"} Oct 07 08:06:08 crc kubenswrapper[4875]: I1007 08:06:08.364574 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4th7n" event={"ID":"34bd548d-264b-4fca-8e6e-153a3309bc28","Type":"ContainerStarted","Data":"0d1d85c0e3349b2726de0fda6b1e3327fdab2715d4543c63961f7150e2d55b6b"} Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.387928 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4th7n" event={"ID":"34bd548d-264b-4fca-8e6e-153a3309bc28","Type":"ContainerStarted","Data":"c63832981664803ee54846d2b22c3e7ef2a8cea18bb836e38b51ecdc8eb5b028"} Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.392061 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" event={"ID":"3d10ad9b-14eb-4da6-a7b9-8ca414305d45","Type":"ContainerStarted","Data":"74a746dfc7d24e61c7df913e4ae111ffb73eefc04c2b8f15d3b752de29154eb3"} Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.398220 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" event={"ID":"3876b303-4b50-4e7f-afbc-fad7b7196009","Type":"ContainerStarted","Data":"48eafc37790f93f886526dd13b431b402d9f24485a9892abe5a05996dcb250af"} Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.399413 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.416141 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-4th7n" podStartSLOduration=2.145562017 podStartE2EDuration="5.416102555s" podCreationTimestamp="2025-10-07 08:06:06 +0000 UTC" firstStartedPulling="2025-10-07 08:06:07.359404673 +0000 UTC m=+592.319175216" lastFinishedPulling="2025-10-07 08:06:10.629945171 +0000 UTC m=+595.589715754" observedRunningTime="2025-10-07 08:06:11.404020393 +0000 UTC m=+596.363791036" watchObservedRunningTime="2025-10-07 08:06:11.416102555 +0000 UTC m=+596.375873128" Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.429143 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" podStartSLOduration=2.158135496 podStartE2EDuration="5.429116478s" podCreationTimestamp="2025-10-07 08:06:06 +0000 UTC" firstStartedPulling="2025-10-07 08:06:07.418676095 +0000 UTC m=+592.378446638" lastFinishedPulling="2025-10-07 08:06:10.689657037 +0000 UTC m=+595.649427620" observedRunningTime="2025-10-07 08:06:11.427528228 +0000 UTC m=+596.387298821" watchObservedRunningTime="2025-10-07 08:06:11.429116478 +0000 UTC m=+596.388887031" Oct 07 08:06:11 crc kubenswrapper[4875]: I1007 08:06:11.458757 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-wgr7v" podStartSLOduration=2.228313005 podStartE2EDuration="5.458734093s" podCreationTimestamp="2025-10-07 08:06:06 +0000 UTC" firstStartedPulling="2025-10-07 08:06:07.399559744 +0000 UTC m=+592.359330277" lastFinishedPulling="2025-10-07 08:06:10.629980802 +0000 UTC m=+595.589751365" observedRunningTime="2025-10-07 08:06:11.457968179 +0000 UTC m=+596.417738742" watchObservedRunningTime="2025-10-07 
08:06:11.458734093 +0000 UTC m=+596.418504636" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.140812 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-rpz2k" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.585184 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8tcxj"] Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.586419 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-controller" containerID="cri-o://6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587190 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587301 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-node" containerID="cri-o://e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587365 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-acl-logging" containerID="cri-o://3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587578 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="sbdb" containerID="cri-o://2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587786 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="nbdb" containerID="cri-o://5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.587547 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="northd" containerID="cri-o://8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.629080 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" containerID="cri-o://62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" gracePeriod=30 Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.775660 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.776011 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.777348 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.777408 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.778751 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.778808 4875 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="sbdb" Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.779425 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Oct 07 08:06:17 crc kubenswrapper[4875]: E1007 08:06:17.779457 4875 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="nbdb" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.948108 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/3.log" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.951779 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovn-acl-logging/0.log" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.952368 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovn-controller/0.log" Oct 07 08:06:17 crc kubenswrapper[4875]: I1007 08:06:17.952979 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018219 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jmv2v"] Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018426 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018439 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018448 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018456 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018465 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018471 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018481 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kubecfg-setup" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018486 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kubecfg-setup" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018494 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018500 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 
08:06:18.018512 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-acl-logging" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018518 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-acl-logging" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018523 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018529 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018538 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="nbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018544 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="nbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018556 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-node" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018562 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-node" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018571 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="sbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018577 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="sbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018585 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="northd" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018591 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="northd" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018692 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018704 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-node" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018709 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018718 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018727 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018734 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 
07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018742 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="sbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018750 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018758 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovn-acl-logging" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018765 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="nbdb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018771 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018778 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="northd" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018862 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018869 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.018906 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.018911 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7806e48-48e7-4680-af2e-e93a05003370" containerName="ovnkube-controller" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.020525 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114192 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114319 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114340 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114391 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114586 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114614 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114637 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114658 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114686 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114700 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114704 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114722 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114734 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114742 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114755 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket" (OuterVolumeSpecName: "log-socket") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114771 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114809 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114829 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq886\" (UniqueName: \"kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114837 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114847 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114826 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114907 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114868 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114929 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash" (OuterVolumeSpecName: "host-slash") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114939 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114972 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.114977 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115031 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115095 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115148 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115193 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns\") pod \"f7806e48-48e7-4680-af2e-e93a05003370\" (UID: \"f7806e48-48e7-4680-af2e-e93a05003370\") " Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115335 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115414 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log" (OuterVolumeSpecName: "node-log") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115432 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115451 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115486 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-etc-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115549 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-kubelet\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115600 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-var-lib-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115644 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-slash\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115679 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-systemd-units\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115729 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115770 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-bin\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115785 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115816 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-script-lib\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115844 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115848 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-netns\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.115888 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116013 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-config\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116069 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-log-socket\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116097 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-node-log\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116132 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovn-node-metrics-cert\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116192 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-systemd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116230 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116263 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-env-overrides\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116334 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twfw2\" (UniqueName: \"kubernetes.io/projected/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-kube-api-access-twfw2\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116411 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-ovn\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116482 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-netd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116594 4875 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-slash\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116610 4875 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116623 4875 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-node-log\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116634 4875 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116646 4875 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 
08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116658 4875 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116668 4875 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116680 4875 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116691 4875 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116704 4875 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116715 4875 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116753 4875 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116780 4875 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116797 4875 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-log-socket\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116817 4875 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7806e48-48e7-4680-af2e-e93a05003370-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116834 4875 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.116848 4875 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.123538 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886" (OuterVolumeSpecName: "kube-api-access-mq886") pod 
"f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "kube-api-access-mq886". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.124290 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.134637 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f7806e48-48e7-4680-af2e-e93a05003370" (UID: "f7806e48-48e7-4680-af2e-e93a05003370"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.218531 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-script-lib\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219022 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-netns\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219065 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-config\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219099 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-log-socket\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219122 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-node-log\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219120 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-netns\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219144 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovn-node-metrics-cert\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219196 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-systemd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219225 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219248 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-env-overrides\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219271 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twfw2\" (UniqueName: \"kubernetes.io/projected/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-kube-api-access-twfw2\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219304 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-ovn\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219327 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-netd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219362 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219389 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-etc-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219421 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-kubelet\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219454 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-var-lib-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219483 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-slash\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-systemd-units\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219537 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219562 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-bin\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219611 4875 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7806e48-48e7-4680-af2e-e93a05003370-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219744 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-script-lib\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.219846 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-log-socket\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220000 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220127 4875 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7806e48-48e7-4680-af2e-e93a05003370-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220174 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-etc-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220212 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-kubelet\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220285 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-systemd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220306 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-var-lib-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220341 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-openvswitch\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220515 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovnkube-config\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220553 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-run-ovn\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220938 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-netd\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220965 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220989 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-slash\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.220982 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-node-log\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.221011 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-systemd-units\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.221017 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-env-overrides\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.221027 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-host-cni-bin\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.221038 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq886\" (UniqueName: \"kubernetes.io/projected/f7806e48-48e7-4680-af2e-e93a05003370-kube-api-access-mq886\") on node \"crc\" DevicePath \"\"" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.223913 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-ovn-node-metrics-cert\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.236266 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twfw2\" (UniqueName: \"kubernetes.io/projected/70ddd5a9-ba2d-46f7-8ac9-abcedab76308-kube-api-access-twfw2\") pod \"ovnkube-node-jmv2v\" (UID: \"70ddd5a9-ba2d-46f7-8ac9-abcedab76308\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.341700 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.447778 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/2.log" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.448838 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/1.log" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.448958 4875 generic.go:334] "Generic (PLEG): container finished" podID="e5a790e1-c591-4cfc-930f-4805a923790b" containerID="84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352" exitCode=2 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.449069 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerDied","Data":"84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.449147 4875 scope.go:117] "RemoveContainer" containerID="512ba91c00a610fe56d1abb8dd82712ba14ce76b4148fe0c69e8864c9a87f977" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.451414 4875 scope.go:117] "RemoveContainer" containerID="84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.451763 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wc2jq_openshift-multus(e5a790e1-c591-4cfc-930f-4805a923790b)\"" pod="openshift-multus/multus-wc2jq" podUID="e5a790e1-c591-4cfc-930f-4805a923790b" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.460352 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovnkube-controller/3.log" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.474673 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovn-acl-logging/0.log" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475321 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8tcxj_f7806e48-48e7-4680-af2e-e93a05003370/ovn-controller/0.log" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475707 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" exitCode=0 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475799 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" exitCode=0 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475812 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" exitCode=0 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475824 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" exitCode=0 Oct 07 
08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475836 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" exitCode=0 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475846 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" exitCode=0 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475857 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" exitCode=143 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475867 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7806e48-48e7-4680-af2e-e93a05003370" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" exitCode=143 Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.475960 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476002 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476017 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476039 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476058 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476072 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476086 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476098 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476105 4875 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476113 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476121 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476128 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476135 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476142 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476149 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476148 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476156 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476242 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476257 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476267 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476274 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476281 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476289 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476297 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476304 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476311 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476318 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476325 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476334 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476344 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476352 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476359 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476367 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476374 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476381 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476388 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476395 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476403 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476409 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476417 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8tcxj" event={"ID":"f7806e48-48e7-4680-af2e-e93a05003370","Type":"ContainerDied","Data":"d61778870f0064b780d9a5025459e730041a035bf352302f1c89a14da6660fee"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476428 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476435 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476442 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476451 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476457 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476464 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476470 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476477 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476483 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.476490 4875 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.487774 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"a17788ccda1152bb3ef9f688ccfcb5bb03b7842ba8dc67309bbc203befc9e720"} Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.512071 4875 scope.go:117] "RemoveContainer" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.524418 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8tcxj"] Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.529115 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8tcxj"] Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.541943 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.601568 4875 scope.go:117] "RemoveContainer" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.614928 4875 scope.go:117] "RemoveContainer" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.627587 4875 scope.go:117] "RemoveContainer" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.643087 4875 scope.go:117] "RemoveContainer" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.658028 4875 scope.go:117] "RemoveContainer" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.672987 4875 scope.go:117] "RemoveContainer" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.690667 4875 scope.go:117] "RemoveContainer" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.708144 4875 scope.go:117] "RemoveContainer" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.730636 4875 scope.go:117] "RemoveContainer" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.732255 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": container with ID starting with 62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0 not found: ID does not exist" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.732303 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} err="failed to get container status \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": rpc error: code = NotFound desc = could not find container \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": container with ID starting with 62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0 not found: ID 
does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.732335 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.732745 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": container with ID starting with 5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f not found: ID does not exist" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.732783 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} err="failed to get container status \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": rpc error: code = NotFound desc = could not find container \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": container with ID starting with 5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.732802 4875 scope.go:117] "RemoveContainer" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.733262 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": container with ID starting with 2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0 not found: ID does not exist" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.733302 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} err="failed to get container status \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": rpc error: code = NotFound desc = could not find container \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": container with ID starting with 2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.733326 4875 scope.go:117] "RemoveContainer" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.733764 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": container with ID starting with 5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26 not found: ID does not exist" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.733802 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} err="failed to get container status \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": rpc error: code = NotFound desc = could not find container 
\"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": container with ID starting with 5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.733826 4875 scope.go:117] "RemoveContainer" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.734211 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": container with ID starting with 8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb not found: ID does not exist" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.734248 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} err="failed to get container status \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": rpc error: code = NotFound desc = could not find container \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": container with ID starting with 8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.734266 4875 scope.go:117] "RemoveContainer" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.734631 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": container with ID starting with 2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8 not found: ID does not exist" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.734666 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} err="failed to get container status \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": rpc error: code = NotFound desc = could not find container \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": container with ID starting with 2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.734682 4875 scope.go:117] "RemoveContainer" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.735033 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": container with ID starting with e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c not found: ID does not exist" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735069 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} 
err="failed to get container status \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": rpc error: code = NotFound desc = could not find container \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": container with ID starting with e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735093 4875 scope.go:117] "RemoveContainer" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.735449 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": container with ID starting with 3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec not found: ID does not exist" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735485 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} err="failed to get container status \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": rpc error: code = NotFound desc = could not find container \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": container with ID starting with 3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735504 4875 scope.go:117] "RemoveContainer" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.735809 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": container with ID starting with 6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff not found: ID does not exist" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735845 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} err="failed to get container status \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": rpc error: code = NotFound desc = could not find container \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": container with ID starting with 6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.735861 4875 scope.go:117] "RemoveContainer" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: E1007 08:06:18.736160 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": container with ID starting with e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9 not found: ID does not exist" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736191 4875 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} err="failed to get container status \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": rpc error: code = NotFound desc = could not find container \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": container with ID starting with e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736211 4875 scope.go:117] "RemoveContainer" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736504 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} err="failed to get container status \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": rpc error: code = NotFound desc = could not find container \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": container with ID starting with 62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736541 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736859 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} err="failed to get container status \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": rpc error: code = NotFound desc = could not find container \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": container with ID starting with 5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.736905 4875 scope.go:117] "RemoveContainer" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.737150 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} err="failed to get container status \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": rpc error: code = NotFound desc = could not find container \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": container with ID starting with 2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.737182 4875 scope.go:117] "RemoveContainer" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.737569 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} err="failed to get container status \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": rpc error: code = NotFound desc = could not find container \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": container with ID starting with 
5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.737622 4875 scope.go:117] "RemoveContainer" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.737976 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} err="failed to get container status \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": rpc error: code = NotFound desc = could not find container \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": container with ID starting with 8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.738020 4875 scope.go:117] "RemoveContainer" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.738666 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} err="failed to get container status \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": rpc error: code = NotFound desc = could not find container \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": container with ID starting with 2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.738691 4875 scope.go:117] "RemoveContainer" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.739103 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} err="failed to get container status \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": rpc error: code = NotFound desc = could not find container \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": container with ID starting with e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.739174 4875 scope.go:117] "RemoveContainer" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.739591 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} err="failed to get container status \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": rpc error: code = NotFound desc = could not find container \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": container with ID starting with 3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.739615 4875 scope.go:117] "RemoveContainer" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.740213 4875 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} err="failed to get container status \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": rpc error: code = NotFound desc = could not find container \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": container with ID starting with 6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.740244 4875 scope.go:117] "RemoveContainer" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.740605 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} err="failed to get container status \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": rpc error: code = NotFound desc = could not find container \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": container with ID starting with e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.740649 4875 scope.go:117] "RemoveContainer" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.741103 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} err="failed to get container status \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": rpc error: code = NotFound desc = could not find container \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": container with ID starting with 62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.741127 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.741496 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} err="failed to get container status \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": rpc error: code = NotFound desc = could not find container \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": container with ID starting with 5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.741546 4875 scope.go:117] "RemoveContainer" containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742019 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} err="failed to get container status \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": rpc error: code = NotFound desc = could not find container \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": container with ID starting with 2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0 not found: ID does not exist" Oct 
07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742048 4875 scope.go:117] "RemoveContainer" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742482 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} err="failed to get container status \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": rpc error: code = NotFound desc = could not find container \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": container with ID starting with 5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742521 4875 scope.go:117] "RemoveContainer" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742807 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} err="failed to get container status \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": rpc error: code = NotFound desc = could not find container \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": container with ID starting with 8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.742838 4875 scope.go:117] "RemoveContainer" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.743171 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} err="failed to get container status \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": rpc error: code = NotFound desc = could not find container \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": container with ID starting with 2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.743194 4875 scope.go:117] "RemoveContainer" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.743533 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} err="failed to get container status \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": rpc error: code = NotFound desc = could not find container \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": container with ID starting with e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.743560 4875 scope.go:117] "RemoveContainer" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744019 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} err="failed to get container status 
\"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": rpc error: code = NotFound desc = could not find container \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": container with ID starting with 3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744044 4875 scope.go:117] "RemoveContainer" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744321 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} err="failed to get container status \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": rpc error: code = NotFound desc = could not find container \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": container with ID starting with 6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744362 4875 scope.go:117] "RemoveContainer" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744710 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} err="failed to get container status \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": rpc error: code = NotFound desc = could not find container \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": container with ID starting with e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.744735 4875 scope.go:117] "RemoveContainer" containerID="62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.745085 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0"} err="failed to get container status \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": rpc error: code = NotFound desc = could not find container \"62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0\": container with ID starting with 62c8d1f45c0a13b9cdc9fed4836cfe12d101ad6a87b80b8e0c0e79c9cc239cf0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.745115 4875 scope.go:117] "RemoveContainer" containerID="5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.745489 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f"} err="failed to get container status \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": rpc error: code = NotFound desc = could not find container \"5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f\": container with ID starting with 5d8ea2a287411218c24c1e8eaf4aeb64b4833e621385a4d8b32ffc0f79ce461f not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.745521 4875 scope.go:117] "RemoveContainer" 
containerID="2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.745960 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0"} err="failed to get container status \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": rpc error: code = NotFound desc = could not find container \"2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0\": container with ID starting with 2b3b1269130a0a1192546bec4aa5b2d4bfd65f381f6cd3948dd54d49b482e6d0 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.746012 4875 scope.go:117] "RemoveContainer" containerID="5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.746378 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26"} err="failed to get container status \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": rpc error: code = NotFound desc = could not find container \"5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26\": container with ID starting with 5eb9157d6f41a164c7cb952c983b441030a97a8f3b8009359440c5eba9846f26 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.746419 4875 scope.go:117] "RemoveContainer" containerID="8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.748354 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb"} err="failed to get container status \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": rpc error: code = NotFound desc = could not find container \"8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb\": container with ID starting with 8c31cbcb48e003dac873f108a5eb3aaff8d7016f7557fb683ae607d7819d7ceb not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.748394 4875 scope.go:117] "RemoveContainer" containerID="2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.748752 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8"} err="failed to get container status \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": rpc error: code = NotFound desc = could not find container \"2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8\": container with ID starting with 2df6aadbe568d17b58738f194596f0e83432176769f7c5bd87e7d4000d3f1ed8 not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.748796 4875 scope.go:117] "RemoveContainer" containerID="e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749082 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c"} err="failed to get container status \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": rpc error: code = NotFound desc = could not find 
container \"e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c\": container with ID starting with e5e9d96fa7102ca0dcbb0fcc156b21899217a32baae85be23379cca6c8a6a93c not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749108 4875 scope.go:117] "RemoveContainer" containerID="3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749392 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec"} err="failed to get container status \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": rpc error: code = NotFound desc = could not find container \"3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec\": container with ID starting with 3c944396320312ab92eb8dd7f1d771a00b965031717edbbeb2b467cee0788dec not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749433 4875 scope.go:117] "RemoveContainer" containerID="6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749777 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff"} err="failed to get container status \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": rpc error: code = NotFound desc = could not find container \"6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff\": container with ID starting with 6db9e77922c120dbfa1ccd55c96777ac3b18e0fcf8bb96956414858ae2c8fcff not found: ID does not exist" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.749806 4875 scope.go:117] "RemoveContainer" containerID="e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9" Oct 07 08:06:18 crc kubenswrapper[4875]: I1007 08:06:18.750158 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9"} err="failed to get container status \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": rpc error: code = NotFound desc = could not find container \"e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9\": container with ID starting with e7cdfd8fed2f21e7063841e5382e668130288feffd52ef6fc5f9b5bc1894f2a9 not found: ID does not exist" Oct 07 08:06:19 crc kubenswrapper[4875]: I1007 08:06:19.500186 4875 generic.go:334] "Generic (PLEG): container finished" podID="70ddd5a9-ba2d-46f7-8ac9-abcedab76308" containerID="7ddf0f754b26d250e43e9d3cffdb9ea5af5a3ad4711d105604a890044424f632" exitCode=0 Oct 07 08:06:19 crc kubenswrapper[4875]: I1007 08:06:19.500294 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerDied","Data":"7ddf0f754b26d250e43e9d3cffdb9ea5af5a3ad4711d105604a890044424f632"} Oct 07 08:06:19 crc kubenswrapper[4875]: I1007 08:06:19.504142 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/2.log" Oct 07 08:06:19 crc kubenswrapper[4875]: I1007 08:06:19.704070 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7806e48-48e7-4680-af2e-e93a05003370" 
path="/var/lib/kubelet/pods/f7806e48-48e7-4680-af2e-e93a05003370/volumes" Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518544 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"8416b518a533e55435652f0e155ae090aa342fa5a94584aaf45de9f01928b528"} Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518607 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"e44925bf17a102250e3c310552028c9ec84304196040ae9e2609c849920d7ddb"} Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518625 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"6aa72b0eb573d0f265a93d89f15d2ff99567372931dbd4dcc17291a2adb11e33"} Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518639 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"54b2af3942ab27ae1263b008c5d250c47f1f1cfdc51e730d7557587fb0acb3b2"} Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518653 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"9585b9114912f845659e7427627ccc91247c45fa603b2eb9f455885befd97484"} Oct 07 08:06:20 crc kubenswrapper[4875]: I1007 08:06:20.518668 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"b0092eb574266aff87335923ca29ce009d915e86e853975d16bf6c94e60b3c66"} Oct 07 08:06:23 crc kubenswrapper[4875]: I1007 08:06:23.542969 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"85be6d4f44126c419463658b4f73e4cae185f33de936ebf1edfe10fd6b826e0c"} Oct 07 08:06:25 crc kubenswrapper[4875]: I1007 08:06:25.558801 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" event={"ID":"70ddd5a9-ba2d-46f7-8ac9-abcedab76308","Type":"ContainerStarted","Data":"580e8729b6da02c11b1598feb6525f5baf4c9195c1cfdd402aa36949687d3332"} Oct 07 08:06:25 crc kubenswrapper[4875]: I1007 08:06:25.559431 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:25 crc kubenswrapper[4875]: I1007 08:06:25.596651 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:25 crc kubenswrapper[4875]: I1007 08:06:25.602517 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" podStartSLOduration=8.6024897 podStartE2EDuration="8.6024897s" podCreationTimestamp="2025-10-07 08:06:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:06:25.597985291 +0000 UTC m=+610.557755854" watchObservedRunningTime="2025-10-07 08:06:25.6024897 +0000 UTC 
m=+610.562260263" Oct 07 08:06:26 crc kubenswrapper[4875]: I1007 08:06:26.573802 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:26 crc kubenswrapper[4875]: I1007 08:06:26.574988 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:26 crc kubenswrapper[4875]: I1007 08:06:26.610128 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:31 crc kubenswrapper[4875]: I1007 08:06:31.698243 4875 scope.go:117] "RemoveContainer" containerID="84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352" Oct 07 08:06:31 crc kubenswrapper[4875]: E1007 08:06:31.699412 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wc2jq_openshift-multus(e5a790e1-c591-4cfc-930f-4805a923790b)\"" pod="openshift-multus/multus-wc2jq" podUID="e5a790e1-c591-4cfc-930f-4805a923790b" Oct 07 08:06:46 crc kubenswrapper[4875]: I1007 08:06:46.697946 4875 scope.go:117] "RemoveContainer" containerID="84832734a0dd2b492cf2c766fc86cc44f732aa4c8da2c4a22df1693d00110352" Oct 07 08:06:47 crc kubenswrapper[4875]: I1007 08:06:47.731268 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wc2jq_e5a790e1-c591-4cfc-930f-4805a923790b/kube-multus/2.log" Oct 07 08:06:47 crc kubenswrapper[4875]: I1007 08:06:47.731854 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wc2jq" event={"ID":"e5a790e1-c591-4cfc-930f-4805a923790b","Type":"ContainerStarted","Data":"441f924e390107e687b89bfc531b47dbb5919f98638507709e861acc06254d91"} Oct 07 08:06:48 crc kubenswrapper[4875]: I1007 08:06:48.363179 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmv2v" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.626995 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6"] Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.628900 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.631463 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.636782 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6"] Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.779544 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.779628 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxm2l\" (UniqueName: \"kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.780399 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.881814 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.881867 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.881918 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxm2l\" (UniqueName: \"kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.882747 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.883076 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.907701 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxm2l\" (UniqueName: \"kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:55 crc kubenswrapper[4875]: I1007 08:06:55.945646 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:06:56 crc kubenswrapper[4875]: I1007 08:06:56.454984 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6"] Oct 07 08:06:56 crc kubenswrapper[4875]: I1007 08:06:56.778529 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerStarted","Data":"743dda99fc380d596c6046f51b0cc99b3efd01106d1611524b1eebc90c432fad"} Oct 07 08:06:56 crc kubenswrapper[4875]: I1007 08:06:56.778588 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerStarted","Data":"5468b9e0b0509f85392ef9e2c3271dd131cf970f44e98f3d1429bbf18379906f"} Oct 07 08:06:57 crc kubenswrapper[4875]: I1007 08:06:57.787826 4875 generic.go:334] "Generic (PLEG): container finished" podID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerID="743dda99fc380d596c6046f51b0cc99b3efd01106d1611524b1eebc90c432fad" exitCode=0 Oct 07 08:06:57 crc kubenswrapper[4875]: I1007 08:06:57.787925 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerDied","Data":"743dda99fc380d596c6046f51b0cc99b3efd01106d1611524b1eebc90c432fad"} Oct 07 08:06:59 crc kubenswrapper[4875]: I1007 08:06:59.805718 4875 generic.go:334] "Generic (PLEG): container finished" podID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerID="694016c9204e98fa9b936a0e17367197106809ce24f795dd71e1d576244ee6f8" exitCode=0 Oct 07 08:06:59 crc kubenswrapper[4875]: I1007 08:06:59.805775 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" 
event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerDied","Data":"694016c9204e98fa9b936a0e17367197106809ce24f795dd71e1d576244ee6f8"} Oct 07 08:07:00 crc kubenswrapper[4875]: I1007 08:07:00.816681 4875 generic.go:334] "Generic (PLEG): container finished" podID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerID="974d4847b9938641c39478fddf7b2f7e97c4fa0440270999a0fd2747bcb1f192" exitCode=0 Oct 07 08:07:00 crc kubenswrapper[4875]: I1007 08:07:00.816798 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerDied","Data":"974d4847b9938641c39478fddf7b2f7e97c4fa0440270999a0fd2747bcb1f192"} Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.055175 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.184631 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util\") pod \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.184716 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxm2l\" (UniqueName: \"kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l\") pod \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.184813 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle\") pod \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\" (UID: \"5e2cee0f-3bcf-478e-9c4c-650f621b6856\") " Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.185952 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle" (OuterVolumeSpecName: "bundle") pod "5e2cee0f-3bcf-478e-9c4c-650f621b6856" (UID: "5e2cee0f-3bcf-478e-9c4c-650f621b6856"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.195205 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l" (OuterVolumeSpecName: "kube-api-access-cxm2l") pod "5e2cee0f-3bcf-478e-9c4c-650f621b6856" (UID: "5e2cee0f-3bcf-478e-9c4c-650f621b6856"). InnerVolumeSpecName "kube-api-access-cxm2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.238707 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util" (OuterVolumeSpecName: "util") pod "5e2cee0f-3bcf-478e-9c4c-650f621b6856" (UID: "5e2cee0f-3bcf-478e-9c4c-650f621b6856"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.286644 4875 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.286688 4875 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e2cee0f-3bcf-478e-9c4c-650f621b6856-util\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.286698 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxm2l\" (UniqueName: \"kubernetes.io/projected/5e2cee0f-3bcf-478e-9c4c-650f621b6856-kube-api-access-cxm2l\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.837137 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" event={"ID":"5e2cee0f-3bcf-478e-9c4c-650f621b6856","Type":"ContainerDied","Data":"5468b9e0b0509f85392ef9e2c3271dd131cf970f44e98f3d1429bbf18379906f"} Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.837188 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6" Oct 07 08:07:02 crc kubenswrapper[4875]: I1007 08:07:02.837193 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5468b9e0b0509f85392ef9e2c3271dd131cf970f44e98f3d1429bbf18379906f" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.186931 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5"] Oct 07 08:07:07 crc kubenswrapper[4875]: E1007 08:07:07.187139 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="pull" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.187150 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="pull" Oct 07 08:07:07 crc kubenswrapper[4875]: E1007 08:07:07.187165 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="extract" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.187172 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="extract" Oct 07 08:07:07 crc kubenswrapper[4875]: E1007 08:07:07.187182 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="util" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.187188 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="util" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.187296 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e2cee0f-3bcf-478e-9c4c-650f621b6856" containerName="extract" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.187720 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.190579 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.190659 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-4mhxr" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.190768 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.199541 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5"] Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.366319 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtbdt\" (UniqueName: \"kubernetes.io/projected/53a570a0-5097-4c09-af4b-8bca758b17b6-kube-api-access-jtbdt\") pod \"nmstate-operator-858ddd8f98-t9gm5\" (UID: \"53a570a0-5097-4c09-af4b-8bca758b17b6\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.467255 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtbdt\" (UniqueName: \"kubernetes.io/projected/53a570a0-5097-4c09-af4b-8bca758b17b6-kube-api-access-jtbdt\") pod \"nmstate-operator-858ddd8f98-t9gm5\" (UID: \"53a570a0-5097-4c09-af4b-8bca758b17b6\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.492593 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtbdt\" (UniqueName: \"kubernetes.io/projected/53a570a0-5097-4c09-af4b-8bca758b17b6-kube-api-access-jtbdt\") pod \"nmstate-operator-858ddd8f98-t9gm5\" (UID: \"53a570a0-5097-4c09-af4b-8bca758b17b6\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.511179 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.749355 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5"] Oct 07 08:07:07 crc kubenswrapper[4875]: I1007 08:07:07.866903 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" event={"ID":"53a570a0-5097-4c09-af4b-8bca758b17b6","Type":"ContainerStarted","Data":"ff819ae1395b6677acdfb34edb64b183fafb6b07b7b50d0ef72c059f89a24273"} Oct 07 08:07:10 crc kubenswrapper[4875]: I1007 08:07:10.890184 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" event={"ID":"53a570a0-5097-4c09-af4b-8bca758b17b6","Type":"ContainerStarted","Data":"51c5053414d3b1fbc4f12e5c6497ada6ab6575cce6cf5b8efa3367d0ac6284a3"} Oct 07 08:07:10 crc kubenswrapper[4875]: I1007 08:07:10.919727 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-t9gm5" podStartSLOduration=1.760334171 podStartE2EDuration="3.91969571s" podCreationTimestamp="2025-10-07 08:07:07 +0000 UTC" firstStartedPulling="2025-10-07 08:07:07.757839416 +0000 UTC m=+652.717609959" lastFinishedPulling="2025-10-07 08:07:09.917200915 +0000 UTC m=+654.876971498" observedRunningTime="2025-10-07 08:07:10.917049929 +0000 UTC m=+655.876820542" watchObservedRunningTime="2025-10-07 08:07:10.91969571 +0000 UTC m=+655.879466253" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.228808 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.234815 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.236183 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.242336 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.244342 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.244680 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-j8wjw" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.252385 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.257284 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.260587 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-cjhsf"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.261498 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325254 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv6p9\" (UniqueName: \"kubernetes.io/projected/a15028de-2729-40bb-add3-8d042826a0e5-kube-api-access-lv6p9\") pod \"nmstate-metrics-fdff9cb8d-lrxvs\" (UID: \"a15028de-2729-40bb-add3-8d042826a0e5\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325338 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phrl9\" (UniqueName: \"kubernetes.io/projected/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-kube-api-access-phrl9\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325394 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-ovs-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325426 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325454 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ws2n\" (UniqueName: \"kubernetes.io/projected/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-kube-api-access-2ws2n\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325500 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-nmstate-lock\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.325524 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-dbus-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.380908 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.381752 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.385753 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bxnhk" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.386050 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.388530 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.403962 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426592 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-ovs-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426665 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426694 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ws2n\" (UniqueName: \"kubernetes.io/projected/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-kube-api-access-2ws2n\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426728 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426754 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426781 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlkdj\" (UniqueName: \"kubernetes.io/projected/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-kube-api-access-zlkdj\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426806 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: 
\"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-nmstate-lock\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426829 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-dbus-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426852 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv6p9\" (UniqueName: \"kubernetes.io/projected/a15028de-2729-40bb-add3-8d042826a0e5-kube-api-access-lv6p9\") pod \"nmstate-metrics-fdff9cb8d-lrxvs\" (UID: \"a15028de-2729-40bb-add3-8d042826a0e5\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.426908 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phrl9\" (UniqueName: \"kubernetes.io/projected/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-kube-api-access-phrl9\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.427370 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-nmstate-lock\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: E1007 08:07:16.427544 4875 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.427580 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-ovs-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.427547 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-dbus-socket\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: E1007 08:07:16.427616 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair podName:9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f nodeName:}" failed. No retries permitted until 2025-10-07 08:07:16.927591028 +0000 UTC m=+661.887361571 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair") pod "nmstate-webhook-6cdbc54649-sv4nj" (UID: "9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f") : secret "openshift-nmstate-webhook" not found Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.450171 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ws2n\" (UniqueName: \"kubernetes.io/projected/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-kube-api-access-2ws2n\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.450213 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv6p9\" (UniqueName: \"kubernetes.io/projected/a15028de-2729-40bb-add3-8d042826a0e5-kube-api-access-lv6p9\") pod \"nmstate-metrics-fdff9cb8d-lrxvs\" (UID: \"a15028de-2729-40bb-add3-8d042826a0e5\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.451554 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phrl9\" (UniqueName: \"kubernetes.io/projected/546057f3-6bcf-4fd9-9c8d-1d21f13a70f8-kube-api-access-phrl9\") pod \"nmstate-handler-cjhsf\" (UID: \"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8\") " pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.528647 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.528704 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.528730 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlkdj\" (UniqueName: \"kubernetes.io/projected/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-kube-api-access-zlkdj\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.529899 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.532468 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " 
pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.547660 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlkdj\" (UniqueName: \"kubernetes.io/projected/85fcbeae-845a-48a4-8aea-e21d5df0e1fd-kube-api-access-zlkdj\") pod \"nmstate-console-plugin-6b874cbd85-fmrrb\" (UID: \"85fcbeae-845a-48a4-8aea-e21d5df0e1fd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.557618 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.574579 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6d7bcd6f86-gfk9f"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.575617 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.584210 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.605440 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d7bcd6f86-gfk9f"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.630599 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631069 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-trusted-ca-bundle\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631102 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-service-ca\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631120 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-oauth-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631158 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-oauth-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631187 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.631248 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdr9j\" (UniqueName: \"kubernetes.io/projected/2ba46891-59dd-4689-9cb7-d3da6ba7685d-kube-api-access-zdr9j\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.694850 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.731987 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-oauth-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732046 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732116 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdr9j\" (UniqueName: \"kubernetes.io/projected/2ba46891-59dd-4689-9cb7-d3da6ba7685d-kube-api-access-zdr9j\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732145 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732161 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-trusted-ca-bundle\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732181 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-service-ca\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.732199 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-oauth-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.733482 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-oauth-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.733741 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-trusted-ca-bundle\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.734334 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-service-ca\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.734478 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.741341 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-oauth-config\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.741367 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ba46891-59dd-4689-9cb7-d3da6ba7685d-console-serving-cert\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.756606 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdr9j\" (UniqueName: \"kubernetes.io/projected/2ba46891-59dd-4689-9cb7-d3da6ba7685d-kube-api-access-zdr9j\") pod \"console-6d7bcd6f86-gfk9f\" (UID: \"2ba46891-59dd-4689-9cb7-d3da6ba7685d\") " pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.815528 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs"] Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.917307 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb"] Oct 07 08:07:16 crc kubenswrapper[4875]: W1007 08:07:16.924409 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85fcbeae_845a_48a4_8aea_e21d5df0e1fd.slice/crio-b9871627244e7e0323b445c50c5e2fd22901e5e17e74ee6e9f46acdc84fa1128 WatchSource:0}: Error finding container b9871627244e7e0323b445c50c5e2fd22901e5e17e74ee6e9f46acdc84fa1128: Status 404 returned error can't find the container with id b9871627244e7e0323b445c50c5e2fd22901e5e17e74ee6e9f46acdc84fa1128 Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.937141 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.942274 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sv4nj\" (UID: \"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.948816 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" event={"ID":"85fcbeae-845a-48a4-8aea-e21d5df0e1fd","Type":"ContainerStarted","Data":"b9871627244e7e0323b445c50c5e2fd22901e5e17e74ee6e9f46acdc84fa1128"} Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.949187 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.950497 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" event={"ID":"a15028de-2729-40bb-add3-8d042826a0e5","Type":"ContainerStarted","Data":"7f66feb452cb60e2513003a95ff78cc9f230601aa9e1d6f69340a2be1936ac16"} Oct 07 08:07:16 crc kubenswrapper[4875]: I1007 08:07:16.952794 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cjhsf" event={"ID":"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8","Type":"ContainerStarted","Data":"deaedc6367d9a622b206878b2986d2c0c058d6046b2b3d580cff722b1069eb35"} Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.170316 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.382217 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d7bcd6f86-gfk9f"] Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.404110 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj"] Oct 07 08:07:17 crc kubenswrapper[4875]: W1007 08:07:17.408414 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bb1aee8_fddf_47ed_8bf5_a4f7310c6c2f.slice/crio-e4b9c3502f970878feb0b277ec6572133421757a7771caf250a4606be5b2ded6 WatchSource:0}: Error finding container e4b9c3502f970878feb0b277ec6572133421757a7771caf250a4606be5b2ded6: Status 404 returned error can't find the container with id e4b9c3502f970878feb0b277ec6572133421757a7771caf250a4606be5b2ded6 Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.964327 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d7bcd6f86-gfk9f" event={"ID":"2ba46891-59dd-4689-9cb7-d3da6ba7685d","Type":"ContainerStarted","Data":"53c1016acf99458427993c8b6e966aeacbcdf1ab59d0c61ad25b98066bba08d8"} Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.964403 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d7bcd6f86-gfk9f" event={"ID":"2ba46891-59dd-4689-9cb7-d3da6ba7685d","Type":"ContainerStarted","Data":"94dbdf097d20b725b0360d6bed95af0a266ba545a2892eea575555fbbbd1cc20"} Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.966322 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" event={"ID":"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f","Type":"ContainerStarted","Data":"e4b9c3502f970878feb0b277ec6572133421757a7771caf250a4606be5b2ded6"} Oct 07 08:07:17 crc kubenswrapper[4875]: I1007 08:07:17.994968 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6d7bcd6f86-gfk9f" podStartSLOduration=1.994928909 podStartE2EDuration="1.994928909s" podCreationTimestamp="2025-10-07 08:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:07:17.993802665 +0000 UTC m=+662.953573248" watchObservedRunningTime="2025-10-07 08:07:17.994928909 +0000 UTC m=+662.954699482" Oct 07 08:07:20 crc kubenswrapper[4875]: I1007 08:07:20.996609 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" event={"ID":"a15028de-2729-40bb-add3-8d042826a0e5","Type":"ContainerStarted","Data":"afe81b42dca7d81f52949b9c42559a1dbf11f477420c778dcde7b9774d9c9949"} Oct 07 08:07:20 crc kubenswrapper[4875]: I1007 08:07:20.998756 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cjhsf" event={"ID":"546057f3-6bcf-4fd9-9c8d-1d21f13a70f8","Type":"ContainerStarted","Data":"ab9b53f7de3b8b90bb91bd1bcd61531491f1a83875cde9648ee9fdb1c7ad794f"} Oct 07 08:07:20 crc kubenswrapper[4875]: I1007 08:07:20.999016 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.002485 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" 
event={"ID":"85fcbeae-845a-48a4-8aea-e21d5df0e1fd","Type":"ContainerStarted","Data":"5fde1ec6004ffb1d7edd5f060de7e3b32d08ef7f879aa7ffc56326459ef09a11"} Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.004506 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" event={"ID":"9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f","Type":"ContainerStarted","Data":"9cbe93bc3963c4932770fe3fe5703fdd96017fb243acda97f0762e6b773ce28b"} Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.004635 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.026785 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-cjhsf" podStartSLOduration=1.918573042 podStartE2EDuration="5.026751122s" podCreationTimestamp="2025-10-07 08:07:16 +0000 UTC" firstStartedPulling="2025-10-07 08:07:16.652861967 +0000 UTC m=+661.612632510" lastFinishedPulling="2025-10-07 08:07:19.761040007 +0000 UTC m=+664.720810590" observedRunningTime="2025-10-07 08:07:21.019084437 +0000 UTC m=+665.978855050" watchObservedRunningTime="2025-10-07 08:07:21.026751122 +0000 UTC m=+665.986521705" Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.086406 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fmrrb" podStartSLOduration=2.260514432 podStartE2EDuration="5.086387258s" podCreationTimestamp="2025-10-07 08:07:16 +0000 UTC" firstStartedPulling="2025-10-07 08:07:16.927108844 +0000 UTC m=+661.886879377" lastFinishedPulling="2025-10-07 08:07:19.75298166 +0000 UTC m=+664.712752203" observedRunningTime="2025-10-07 08:07:21.043137595 +0000 UTC m=+666.002908138" watchObservedRunningTime="2025-10-07 08:07:21.086387258 +0000 UTC m=+666.046157801" Oct 07 08:07:21 crc kubenswrapper[4875]: I1007 08:07:21.087018 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" podStartSLOduration=2.737264791 podStartE2EDuration="5.087014348s" podCreationTimestamp="2025-10-07 08:07:16 +0000 UTC" firstStartedPulling="2025-10-07 08:07:17.414413005 +0000 UTC m=+662.374183548" lastFinishedPulling="2025-10-07 08:07:19.764162532 +0000 UTC m=+664.723933105" observedRunningTime="2025-10-07 08:07:21.083656745 +0000 UTC m=+666.043427288" watchObservedRunningTime="2025-10-07 08:07:21.087014348 +0000 UTC m=+666.046784891" Oct 07 08:07:23 crc kubenswrapper[4875]: I1007 08:07:23.024068 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" event={"ID":"a15028de-2729-40bb-add3-8d042826a0e5","Type":"ContainerStarted","Data":"9780c2f061fec3a0de1d456cd31a9ca98eab89323f47995914c98532becd93ab"} Oct 07 08:07:23 crc kubenswrapper[4875]: I1007 08:07:23.055606 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lrxvs" podStartSLOduration=1.700025771 podStartE2EDuration="7.055557984s" podCreationTimestamp="2025-10-07 08:07:16 +0000 UTC" firstStartedPulling="2025-10-07 08:07:16.820774828 +0000 UTC m=+661.780545371" lastFinishedPulling="2025-10-07 08:07:22.176307041 +0000 UTC m=+667.136077584" observedRunningTime="2025-10-07 08:07:23.049743086 +0000 UTC m=+668.009513679" watchObservedRunningTime="2025-10-07 08:07:23.055557984 +0000 UTC m=+668.015328567" Oct 07 08:07:26 crc 
kubenswrapper[4875]: I1007 08:07:26.626061 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-cjhsf" Oct 07 08:07:26 crc kubenswrapper[4875]: I1007 08:07:26.951387 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:26 crc kubenswrapper[4875]: I1007 08:07:26.959075 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:26 crc kubenswrapper[4875]: I1007 08:07:26.980984 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:26 crc kubenswrapper[4875]: I1007 08:07:26.990054 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6d7bcd6f86-gfk9f" Oct 07 08:07:27 crc kubenswrapper[4875]: I1007 08:07:27.057714 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 08:07:37 crc kubenswrapper[4875]: I1007 08:07:37.179030 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sv4nj" Oct 07 08:07:51 crc kubenswrapper[4875]: I1007 08:07:51.895644 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn"] Oct 07 08:07:51 crc kubenswrapper[4875]: I1007 08:07:51.898256 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:51 crc kubenswrapper[4875]: I1007 08:07:51.900262 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 08:07:51 crc kubenswrapper[4875]: I1007 08:07:51.916512 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn"] Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.085987 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8c7j\" (UniqueName: \"kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.086087 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.086110 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 
08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.109040 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-wccw7" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" containerID="cri-o://5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a" gracePeriod=15 Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.187749 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8c7j\" (UniqueName: \"kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.187950 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.188000 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.188958 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.189254 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.226176 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8c7j\" (UniqueName: \"kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.245630 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.552666 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wccw7_fd8a9079-4c7c-467a-9e0d-fa0c2bc15482/console/0.log" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.553207 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.570794 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn"] Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.746571 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747111 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747198 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747229 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxwxx\" (UniqueName: \"kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747290 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747372 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.747450 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca\") pod \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\" (UID: \"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482\") " Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.748417 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca" (OuterVolumeSpecName: "service-ca") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" 
(UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.748429 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.748733 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config" (OuterVolumeSpecName: "console-config") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.748929 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.753774 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx" (OuterVolumeSpecName: "kube-api-access-rxwxx") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "kube-api-access-rxwxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.754139 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.754199 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" (UID: "fd8a9079-4c7c-467a-9e0d-fa0c2bc15482"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849234 4875 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849285 4875 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849298 4875 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849308 4875 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849319 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxwxx\" (UniqueName: \"kubernetes.io/projected/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-kube-api-access-rxwxx\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849328 4875 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:52 crc kubenswrapper[4875]: I1007 08:07:52.849337 4875 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.241556 4875 generic.go:334] "Generic (PLEG): container finished" podID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerID="91a2d1b09ddd5ce6ffe7506d0b916f9fffda56551ea9cc961bf8bf192e58b65c" exitCode=0 Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.241642 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" event={"ID":"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce","Type":"ContainerDied","Data":"91a2d1b09ddd5ce6ffe7506d0b916f9fffda56551ea9cc961bf8bf192e58b65c"} Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.241746 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" event={"ID":"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce","Type":"ContainerStarted","Data":"51185ef76b9cfb5c2ddf8ff5d2848385ce54291d78757a05f7f2e2a05e6957bd"} Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243606 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wccw7_fd8a9079-4c7c-467a-9e0d-fa0c2bc15482/console/0.log" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243660 4875 generic.go:334] "Generic (PLEG): container finished" podID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerID="5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a" exitCode=2 Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243707 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-console/console-f9d7485db-wccw7" event={"ID":"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482","Type":"ContainerDied","Data":"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a"} Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243731 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wccw7" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243752 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wccw7" event={"ID":"fd8a9079-4c7c-467a-9e0d-fa0c2bc15482","Type":"ContainerDied","Data":"1b525f2ebbd53475b6527e53b3c0ca49a8bbeed6900600ef4998550d316b0e5a"} Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.243799 4875 scope.go:117] "RemoveContainer" containerID="5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.280436 4875 scope.go:117] "RemoveContainer" containerID="5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a" Oct 07 08:07:53 crc kubenswrapper[4875]: E1007 08:07:53.280944 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a\": container with ID starting with 5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a not found: ID does not exist" containerID="5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.281023 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a"} err="failed to get container status \"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a\": rpc error: code = NotFound desc = could not find container \"5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a\": container with ID starting with 5763155efae84b2a9f4b922c20b176bb5934b95c994ed12a7b2cbea89160415a not found: ID does not exist" Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.292768 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.296263 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-wccw7"] Oct 07 08:07:53 crc kubenswrapper[4875]: I1007 08:07:53.707402 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" path="/var/lib/kubelet/pods/fd8a9079-4c7c-467a-9e0d-fa0c2bc15482/volumes" Oct 07 08:07:56 crc kubenswrapper[4875]: I1007 08:07:56.270481 4875 generic.go:334] "Generic (PLEG): container finished" podID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerID="193c24310f8d33fcc5f6271db77b89daff0d24aa88068387752942fe6fafd3ce" exitCode=0 Oct 07 08:07:56 crc kubenswrapper[4875]: I1007 08:07:56.270624 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" event={"ID":"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce","Type":"ContainerDied","Data":"193c24310f8d33fcc5f6271db77b89daff0d24aa88068387752942fe6fafd3ce"} Oct 07 08:07:57 crc kubenswrapper[4875]: I1007 08:07:57.279488 4875 generic.go:334] "Generic (PLEG): container finished" podID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" 
containerID="8040fa50eb7b2739ae7e1cc5bdd782c052056ce5eb28b20f7accdee8c995f31b" exitCode=0 Oct 07 08:07:57 crc kubenswrapper[4875]: I1007 08:07:57.279544 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" event={"ID":"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce","Type":"ContainerDied","Data":"8040fa50eb7b2739ae7e1cc5bdd782c052056ce5eb28b20f7accdee8c995f31b"} Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.598237 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.738235 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle\") pod \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.738285 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8c7j\" (UniqueName: \"kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j\") pod \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.738426 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util\") pod \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\" (UID: \"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce\") " Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.740514 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle" (OuterVolumeSpecName: "bundle") pod "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" (UID: "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.744035 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j" (OuterVolumeSpecName: "kube-api-access-k8c7j") pod "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" (UID: "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce"). InnerVolumeSpecName "kube-api-access-k8c7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.751042 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util" (OuterVolumeSpecName: "util") pod "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" (UID: "d6b0d9bd-27b9-41ec-bc0e-87a065c184ce"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.839766 4875 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-util\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.839807 4875 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:58 crc kubenswrapper[4875]: I1007 08:07:58.839817 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8c7j\" (UniqueName: \"kubernetes.io/projected/d6b0d9bd-27b9-41ec-bc0e-87a065c184ce-kube-api-access-k8c7j\") on node \"crc\" DevicePath \"\"" Oct 07 08:07:59 crc kubenswrapper[4875]: I1007 08:07:59.299005 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" event={"ID":"d6b0d9bd-27b9-41ec-bc0e-87a065c184ce","Type":"ContainerDied","Data":"51185ef76b9cfb5c2ddf8ff5d2848385ce54291d78757a05f7f2e2a05e6957bd"} Oct 07 08:07:59 crc kubenswrapper[4875]: I1007 08:07:59.299060 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn" Oct 07 08:07:59 crc kubenswrapper[4875]: I1007 08:07:59.299094 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51185ef76b9cfb5c2ddf8ff5d2848385ce54291d78757a05f7f2e2a05e6957bd" Oct 07 08:08:01 crc kubenswrapper[4875]: I1007 08:08:01.221504 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:08:01 crc kubenswrapper[4875]: I1007 08:08:01.221583 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.283289 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64"] Oct 07 08:08:07 crc kubenswrapper[4875]: E1007 08:08:07.284223 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="extract" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284239 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="extract" Oct 07 08:08:07 crc kubenswrapper[4875]: E1007 08:08:07.284693 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="pull" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284710 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="pull" Oct 07 08:08:07 crc kubenswrapper[4875]: E1007 08:08:07.284727 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" Oct 07 
08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284734 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" Oct 07 08:08:07 crc kubenswrapper[4875]: E1007 08:08:07.284748 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="util" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284755 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="util" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284913 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b0d9bd-27b9-41ec-bc0e-87a065c184ce" containerName="extract" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.284933 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd8a9079-4c7c-467a-9e0d-fa0c2bc15482" containerName="console" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.285400 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.292431 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.292473 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.292712 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.292814 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-gz589" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.293137 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.301803 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64"] Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.475656 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-webhook-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.476013 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvt8r\" (UniqueName: \"kubernetes.io/projected/497229d2-1650-4976-88c5-24f97a0afdb8-kube-api-access-nvt8r\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.476453 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-apiservice-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: 
\"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.525177 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-769577ff68-db67z"] Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.526142 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.528443 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.528680 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-dxktb" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.529347 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.548436 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-769577ff68-db67z"] Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.578132 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvt8r\" (UniqueName: \"kubernetes.io/projected/497229d2-1650-4976-88c5-24f97a0afdb8-kube-api-access-nvt8r\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.578243 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-apiservice-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.578334 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-webhook-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.589897 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-webhook-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.604608 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvt8r\" (UniqueName: \"kubernetes.io/projected/497229d2-1650-4976-88c5-24f97a0afdb8-kube-api-access-nvt8r\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.614217 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/497229d2-1650-4976-88c5-24f97a0afdb8-apiservice-cert\") pod \"metallb-operator-controller-manager-79ccb7884d-7cp64\" (UID: \"497229d2-1650-4976-88c5-24f97a0afdb8\") " pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.680250 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6rn6\" (UniqueName: \"kubernetes.io/projected/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-kube-api-access-r6rn6\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.680322 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-apiservice-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.680349 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-webhook-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.781724 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6rn6\" (UniqueName: \"kubernetes.io/projected/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-kube-api-access-r6rn6\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.781821 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-apiservice-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.781918 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-webhook-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.786015 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-webhook-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.789394 4875 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-apiservice-cert\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.797582 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6rn6\" (UniqueName: \"kubernetes.io/projected/f34be03b-6e0c-40e3-99b9-3b1dbe22e40a-kube-api-access-r6rn6\") pod \"metallb-operator-webhook-server-769577ff68-db67z\" (UID: \"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a\") " pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.838723 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:07 crc kubenswrapper[4875]: I1007 08:08:07.904489 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:08 crc kubenswrapper[4875]: I1007 08:08:08.279528 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64"] Oct 07 08:08:08 crc kubenswrapper[4875]: I1007 08:08:08.331793 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-769577ff68-db67z"] Oct 07 08:08:08 crc kubenswrapper[4875]: I1007 08:08:08.364725 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" event={"ID":"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a","Type":"ContainerStarted","Data":"962c50fe18ff5d5f25583f3f5101551b50d994bf64b3b94f4e2590944dd313e7"} Oct 07 08:08:08 crc kubenswrapper[4875]: I1007 08:08:08.366174 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" event={"ID":"497229d2-1650-4976-88c5-24f97a0afdb8","Type":"ContainerStarted","Data":"f41969cdf2d69d36c02d32d9e81f9a832f7f41f7327ba1c6d65947072a38a15c"} Oct 07 08:08:12 crc kubenswrapper[4875]: I1007 08:08:12.400225 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" event={"ID":"497229d2-1650-4976-88c5-24f97a0afdb8","Type":"ContainerStarted","Data":"dc10b3e463d512c488711964a11392a0170d5d2418e63985fb203191b8cc59fc"} Oct 07 08:08:12 crc kubenswrapper[4875]: I1007 08:08:12.402298 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:12 crc kubenswrapper[4875]: I1007 08:08:12.433806 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" podStartSLOduration=2.174711921 podStartE2EDuration="5.433784722s" podCreationTimestamp="2025-10-07 08:08:07 +0000 UTC" firstStartedPulling="2025-10-07 08:08:08.282695219 +0000 UTC m=+713.242465762" lastFinishedPulling="2025-10-07 08:08:11.54176802 +0000 UTC m=+716.501538563" observedRunningTime="2025-10-07 08:08:12.426073877 +0000 UTC m=+717.385844420" watchObservedRunningTime="2025-10-07 08:08:12.433784722 +0000 UTC m=+717.393555265" Oct 07 08:08:14 crc kubenswrapper[4875]: I1007 08:08:14.435977 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" event={"ID":"f34be03b-6e0c-40e3-99b9-3b1dbe22e40a","Type":"ContainerStarted","Data":"d70d2ea753ee4015c99c91c09bdcd70fcaef3832320357f9e2c7470d2869517d"} Oct 07 08:08:14 crc kubenswrapper[4875]: I1007 08:08:14.436475 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:14 crc kubenswrapper[4875]: I1007 08:08:14.480225 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" podStartSLOduration=2.47284573 podStartE2EDuration="7.480199953s" podCreationTimestamp="2025-10-07 08:08:07 +0000 UTC" firstStartedPulling="2025-10-07 08:08:08.339749725 +0000 UTC m=+713.299520268" lastFinishedPulling="2025-10-07 08:08:13.347103948 +0000 UTC m=+718.306874491" observedRunningTime="2025-10-07 08:08:14.474907141 +0000 UTC m=+719.434677714" watchObservedRunningTime="2025-10-07 08:08:14.480199953 +0000 UTC m=+719.439970506" Oct 07 08:08:27 crc kubenswrapper[4875]: I1007 08:08:27.846098 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-769577ff68-db67z" Oct 07 08:08:31 crc kubenswrapper[4875]: I1007 08:08:31.221592 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:08:31 crc kubenswrapper[4875]: I1007 08:08:31.222021 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:08:42 crc kubenswrapper[4875]: I1007 08:08:42.890086 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 08:08:42 crc kubenswrapper[4875]: I1007 08:08:42.891675 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" containerID="cri-o://73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03" gracePeriod=30 Oct 07 08:08:42 crc kubenswrapper[4875]: I1007 08:08:42.965307 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 08:08:42 crc kubenswrapper[4875]: I1007 08:08:42.965555 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerName="route-controller-manager" containerID="cri-o://c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38" gracePeriod=30 Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.486915 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.491369 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.662773 4875 generic.go:334] "Generic (PLEG): container finished" podID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerID="c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38" exitCode=0 Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.662853 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" event={"ID":"c26c9450-7ff8-4142-b007-01a00adbb28d","Type":"ContainerDied","Data":"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38"} Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.662926 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" event={"ID":"c26c9450-7ff8-4142-b007-01a00adbb28d","Type":"ContainerDied","Data":"19ef1c4a0bfdc1399aefe9822e38dc9d3c7f0a394f854dfb62cd6df04800d582"} Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.662854 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.662952 4875 scope.go:117] "RemoveContainer" containerID="c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.663718 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkzx6\" (UniqueName: \"kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6\") pod \"de07f99e-f06a-4800-96f5-ecfebf9630f2\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.663782 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvf2h\" (UniqueName: \"kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h\") pod \"c26c9450-7ff8-4142-b007-01a00adbb28d\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.663818 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config\") pod \"de07f99e-f06a-4800-96f5-ecfebf9630f2\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.663845 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") pod \"de07f99e-f06a-4800-96f5-ecfebf9630f2\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.663950 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert\") pod \"de07f99e-f06a-4800-96f5-ecfebf9630f2\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664054 4875 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca\") pod \"c26c9450-7ff8-4142-b007-01a00adbb28d\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664092 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") pod \"de07f99e-f06a-4800-96f5-ecfebf9630f2\" (UID: \"de07f99e-f06a-4800-96f5-ecfebf9630f2\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664123 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config\") pod \"c26c9450-7ff8-4142-b007-01a00adbb28d\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664155 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert\") pod \"c26c9450-7ff8-4142-b007-01a00adbb28d\" (UID: \"c26c9450-7ff8-4142-b007-01a00adbb28d\") " Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664745 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "de07f99e-f06a-4800-96f5-ecfebf9630f2" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.664797 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config" (OuterVolumeSpecName: "config") pod "de07f99e-f06a-4800-96f5-ecfebf9630f2" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665052 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca" (OuterVolumeSpecName: "client-ca") pod "de07f99e-f06a-4800-96f5-ecfebf9630f2" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665219 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config" (OuterVolumeSpecName: "config") pod "c26c9450-7ff8-4142-b007-01a00adbb28d" (UID: "c26c9450-7ff8-4142-b007-01a00adbb28d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665379 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca" (OuterVolumeSpecName: "client-ca") pod "c26c9450-7ff8-4142-b007-01a00adbb28d" (UID: "c26c9450-7ff8-4142-b007-01a00adbb28d"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665892 4875 generic.go:334] "Generic (PLEG): container finished" podID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerID="73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03" exitCode=0 Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665937 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" event={"ID":"de07f99e-f06a-4800-96f5-ecfebf9630f2","Type":"ContainerDied","Data":"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03"} Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.665971 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" event={"ID":"de07f99e-f06a-4800-96f5-ecfebf9630f2","Type":"ContainerDied","Data":"b1c30cbccdb858fd5ebb00b9f84f657ecec36474f5f4d36a1d7a0906b4b6b936"} Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.666013 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gk4f8" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.671689 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c26c9450-7ff8-4142-b007-01a00adbb28d" (UID: "c26c9450-7ff8-4142-b007-01a00adbb28d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.672375 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "de07f99e-f06a-4800-96f5-ecfebf9630f2" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.674714 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6" (OuterVolumeSpecName: "kube-api-access-xkzx6") pod "de07f99e-f06a-4800-96f5-ecfebf9630f2" (UID: "de07f99e-f06a-4800-96f5-ecfebf9630f2"). InnerVolumeSpecName "kube-api-access-xkzx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.678617 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h" (OuterVolumeSpecName: "kube-api-access-kvf2h") pod "c26c9450-7ff8-4142-b007-01a00adbb28d" (UID: "c26c9450-7ff8-4142-b007-01a00adbb28d"). InnerVolumeSpecName "kube-api-access-kvf2h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.688175 4875 scope.go:117] "RemoveContainer" containerID="c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38" Oct 07 08:08:43 crc kubenswrapper[4875]: E1007 08:08:43.688768 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38\": container with ID starting with c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38 not found: ID does not exist" containerID="c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.688818 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38"} err="failed to get container status \"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38\": rpc error: code = NotFound desc = could not find container \"c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38\": container with ID starting with c658d911a47905d896e249513cab92d2baa881c89e470b836749789321561d38 not found: ID does not exist" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.688852 4875 scope.go:117] "RemoveContainer" containerID="73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.707014 4875 scope.go:117] "RemoveContainer" containerID="73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03" Oct 07 08:08:43 crc kubenswrapper[4875]: E1007 08:08:43.707454 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03\": container with ID starting with 73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03 not found: ID does not exist" containerID="73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.707512 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03"} err="failed to get container status \"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03\": rpc error: code = NotFound desc = could not find container \"73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03\": container with ID starting with 73ed0999bb7b60acefc5124e4e780af48bd9ddfa872a09b9b4fb69bce9273e03 not found: ID does not exist" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.765979 4875 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766016 4875 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766028 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c26c9450-7ff8-4142-b007-01a00adbb28d-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766037 4875 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26c9450-7ff8-4142-b007-01a00adbb28d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766048 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkzx6\" (UniqueName: \"kubernetes.io/projected/de07f99e-f06a-4800-96f5-ecfebf9630f2-kube-api-access-xkzx6\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766060 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvf2h\" (UniqueName: \"kubernetes.io/projected/c26c9450-7ff8-4142-b007-01a00adbb28d-kube-api-access-kvf2h\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766068 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766076 4875 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/de07f99e-f06a-4800-96f5-ecfebf9630f2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.766087 4875 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de07f99e-f06a-4800-96f5-ecfebf9630f2-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.986092 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 08:08:43 crc kubenswrapper[4875]: I1007 08:08:43.992518 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cf7b2"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.001802 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.012271 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gk4f8"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.700226 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z"] Oct 07 08:08:44 crc kubenswrapper[4875]: E1007 08:08:44.700541 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerName="route-controller-manager" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.700559 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerName="route-controller-manager" Oct 07 08:08:44 crc kubenswrapper[4875]: E1007 08:08:44.700572 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.700580 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.700713 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" containerName="controller-manager" Oct 07 
08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.700732 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" containerName="route-controller-manager" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.701243 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.702928 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6468d4b774-6jmgl"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.703413 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.707510 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.707943 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.708131 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.708289 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.708528 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.709287 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.709355 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.709483 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.709518 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.709726 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.711847 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.712174 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.722382 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.723423 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.729868 4875 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6468d4b774-6jmgl"] Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.780723 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/822b5eab-8104-4e3d-88e6-52053e69f49e-serving-cert\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.780839 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d773ec05-8da3-4e65-8a35-817fad655987-serving-cert\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.780895 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-config\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.780922 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x75d5\" (UniqueName: \"kubernetes.io/projected/822b5eab-8104-4e3d-88e6-52053e69f49e-kube-api-access-x75d5\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.781001 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-client-ca\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.781029 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df7vn\" (UniqueName: \"kubernetes.io/projected/d773ec05-8da3-4e65-8a35-817fad655987-kube-api-access-df7vn\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.781055 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-config\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.781089 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-client-ca\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: 
\"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.781122 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-proxy-ca-bundles\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.881790 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/822b5eab-8104-4e3d-88e6-52053e69f49e-serving-cert\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.881865 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d773ec05-8da3-4e65-8a35-817fad655987-serving-cert\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.881911 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-config\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.881940 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x75d5\" (UniqueName: \"kubernetes.io/projected/822b5eab-8104-4e3d-88e6-52053e69f49e-kube-api-access-x75d5\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.882012 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-client-ca\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.882039 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df7vn\" (UniqueName: \"kubernetes.io/projected/d773ec05-8da3-4e65-8a35-817fad655987-kube-api-access-df7vn\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.882065 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-config\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" 
Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.882090 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-client-ca\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.882111 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-proxy-ca-bundles\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.883231 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-client-ca\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.883463 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-proxy-ca-bundles\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.883711 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d773ec05-8da3-4e65-8a35-817fad655987-config\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.883854 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-client-ca\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.883894 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/822b5eab-8104-4e3d-88e6-52053e69f49e-config\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.889699 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/822b5eab-8104-4e3d-88e6-52053e69f49e-serving-cert\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.891591 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d773ec05-8da3-4e65-8a35-817fad655987-serving-cert\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.906348 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df7vn\" (UniqueName: \"kubernetes.io/projected/d773ec05-8da3-4e65-8a35-817fad655987-kube-api-access-df7vn\") pod \"controller-manager-6468d4b774-6jmgl\" (UID: \"d773ec05-8da3-4e65-8a35-817fad655987\") " pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:44 crc kubenswrapper[4875]: I1007 08:08:44.913402 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x75d5\" (UniqueName: \"kubernetes.io/projected/822b5eab-8104-4e3d-88e6-52053e69f49e-kube-api-access-x75d5\") pod \"route-controller-manager-695c5d4c77-z862z\" (UID: \"822b5eab-8104-4e3d-88e6-52053e69f49e\") " pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.025794 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.036769 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.283046 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z"] Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.332322 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6468d4b774-6jmgl"] Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.683718 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" event={"ID":"822b5eab-8104-4e3d-88e6-52053e69f49e","Type":"ContainerStarted","Data":"def88d2c0ae9ed7d3f09a013a0397121ad50494da92a961b683771e42c39b871"} Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.684216 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" event={"ID":"822b5eab-8104-4e3d-88e6-52053e69f49e","Type":"ContainerStarted","Data":"381be186c5e4a51336cf707f611ff349c29c2410cddb4b9743e09a08e99c8860"} Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.684665 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.686580 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" event={"ID":"d773ec05-8da3-4e65-8a35-817fad655987","Type":"ContainerStarted","Data":"a5019d72281e2ea2ca41248f8e56eee6c5016ab58fd8061addbe47835b03ed67"} Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.686614 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" event={"ID":"d773ec05-8da3-4e65-8a35-817fad655987","Type":"ContainerStarted","Data":"1e06d9166d0992a80b152c94836a484955566f79581fe74ea95737a6b16c8552"} Oct 07 
08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.686952 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.706587 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c26c9450-7ff8-4142-b007-01a00adbb28d" path="/var/lib/kubelet/pods/c26c9450-7ff8-4142-b007-01a00adbb28d/volumes" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.707421 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de07f99e-f06a-4800-96f5-ecfebf9630f2" path="/var/lib/kubelet/pods/de07f99e-f06a-4800-96f5-ecfebf9630f2/volumes" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.707918 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.725630 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" podStartSLOduration=2.725611824 podStartE2EDuration="2.725611824s" podCreationTimestamp="2025-10-07 08:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:08:45.722452932 +0000 UTC m=+750.682223475" watchObservedRunningTime="2025-10-07 08:08:45.725611824 +0000 UTC m=+750.685382367" Oct 07 08:08:45 crc kubenswrapper[4875]: I1007 08:08:45.757108 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6468d4b774-6jmgl" podStartSLOduration=2.7570857479999997 podStartE2EDuration="2.757085748s" podCreationTimestamp="2025-10-07 08:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:08:45.748576964 +0000 UTC m=+750.708347507" watchObservedRunningTime="2025-10-07 08:08:45.757085748 +0000 UTC m=+750.716856291" Oct 07 08:08:46 crc kubenswrapper[4875]: I1007 08:08:46.320105 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-695c5d4c77-z862z" Oct 07 08:08:47 crc kubenswrapper[4875]: I1007 08:08:47.906992 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-79ccb7884d-7cp64" Oct 07 08:08:47 crc kubenswrapper[4875]: I1007 08:08:47.910259 4875 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.625657 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-s9mlf"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.629144 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.630692 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.631502 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.632240 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.632320 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-rp6m8" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.632800 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.636232 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.646861 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732521 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732656 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-sockets\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732699 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-reloader\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732719 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics-certs\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732751 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732770 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-startup\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.732998 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6mgr\" (UniqueName: \"kubernetes.io/projected/2570bc7b-9601-4fe3-8fb7-da9277a529c7-kube-api-access-c6mgr\") pod 
\"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.733047 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62tlp\" (UniqueName: \"kubernetes.io/projected/a5e85290-9c5d-43cf-b0a8-41c2399d7122-kube-api-access-62tlp\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.733079 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-conf\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.749123 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-xf2p2"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.750129 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.754081 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.754202 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-nrls8" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.756338 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.756778 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.765671 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-jlztd"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.766591 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.768089 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.779892 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-jlztd"] Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835077 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835153 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-cert\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835196 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptv44\" (UniqueName: \"kubernetes.io/projected/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-kube-api-access-ptv44\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835239 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-sockets\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835278 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-metrics-certs\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835399 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metallb-excludel2\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835461 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-reloader\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835497 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skxqp\" (UniqueName: \"kubernetes.io/projected/90069e5a-5263-4c20-9c21-caa665096b11-kube-api-access-skxqp\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 
08:08:48.835555 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics-certs\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835626 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835652 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-startup\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835701 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6mgr\" (UniqueName: \"kubernetes.io/projected/2570bc7b-9601-4fe3-8fb7-da9277a529c7-kube-api-access-c6mgr\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835736 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62tlp\" (UniqueName: \"kubernetes.io/projected/a5e85290-9c5d-43cf-b0a8-41c2399d7122-kube-api-access-62tlp\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-conf\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.835823 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-sockets\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.836122 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.836199 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metrics-certs\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.836366 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-reloader\") pod 
\"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: E1007 08:08:48.836458 4875 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Oct 07 08:08:48 crc kubenswrapper[4875]: E1007 08:08:48.836527 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert podName:2570bc7b-9601-4fe3-8fb7-da9277a529c7 nodeName:}" failed. No retries permitted until 2025-10-07 08:08:49.3364996 +0000 UTC m=+754.296270143 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert") pod "frr-k8s-webhook-server-64bf5d555-hw72k" (UID: "2570bc7b-9601-4fe3-8fb7-da9277a529c7") : secret "frr-k8s-webhook-server-cert" not found Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.837225 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-conf\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.837447 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.838039 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a5e85290-9c5d-43cf-b0a8-41c2399d7122-frr-startup\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.854584 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6mgr\" (UniqueName: \"kubernetes.io/projected/2570bc7b-9601-4fe3-8fb7-da9277a529c7-kube-api-access-c6mgr\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.855123 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5e85290-9c5d-43cf-b0a8-41c2399d7122-metrics-certs\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.857283 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62tlp\" (UniqueName: \"kubernetes.io/projected/a5e85290-9c5d-43cf-b0a8-41c2399d7122-kube-api-access-62tlp\") pod \"frr-k8s-s9mlf\" (UID: \"a5e85290-9c5d-43cf-b0a8-41c2399d7122\") " pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.937862 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-metrics-certs\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 
08:08:48.938072 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metallb-excludel2\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938103 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skxqp\" (UniqueName: \"kubernetes.io/projected/90069e5a-5263-4c20-9c21-caa665096b11-kube-api-access-skxqp\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938200 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metrics-certs\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938239 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938288 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-cert\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938321 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptv44\" (UniqueName: \"kubernetes.io/projected/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-kube-api-access-ptv44\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.938945 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metallb-excludel2\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: E1007 08:08:48.939083 4875 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 07 08:08:48 crc kubenswrapper[4875]: E1007 08:08:48.939139 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist podName:dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7 nodeName:}" failed. No retries permitted until 2025-10-07 08:08:49.439120535 +0000 UTC m=+754.398891078 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist") pod "speaker-xf2p2" (UID: "dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7") : secret "metallb-memberlist" not found Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.942116 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-metrics-certs\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.943036 4875 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.943688 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-metrics-certs\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.948398 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.952683 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90069e5a-5263-4c20-9c21-caa665096b11-cert\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.958203 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptv44\" (UniqueName: \"kubernetes.io/projected/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-kube-api-access-ptv44\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:48 crc kubenswrapper[4875]: I1007 08:08:48.958361 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skxqp\" (UniqueName: \"kubernetes.io/projected/90069e5a-5263-4c20-9c21-caa665096b11-kube-api-access-skxqp\") pod \"controller-68d546b9d8-jlztd\" (UID: \"90069e5a-5263-4c20-9c21-caa665096b11\") " pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.079564 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.346069 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.353119 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2570bc7b-9601-4fe3-8fb7-da9277a529c7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-hw72k\" (UID: \"2570bc7b-9601-4fe3-8fb7-da9277a529c7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.448412 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:49 crc kubenswrapper[4875]: E1007 08:08:49.448738 4875 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 07 08:08:49 crc kubenswrapper[4875]: E1007 08:08:49.448921 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist podName:dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7 nodeName:}" failed. No retries permitted until 2025-10-07 08:08:50.448856283 +0000 UTC m=+755.408626856 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist") pod "speaker-xf2p2" (UID: "dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7") : secret "metallb-memberlist" not found Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.541493 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-jlztd"] Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.557209 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.744206 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jlztd" event={"ID":"90069e5a-5263-4c20-9c21-caa665096b11","Type":"ContainerStarted","Data":"ab6e6ded32bce46c7fe4bffd6afd0fc26e2c6593362697932f6bbe17f49498fc"} Oct 07 08:08:49 crc kubenswrapper[4875]: I1007 08:08:49.761872 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"70e70b7c638500a82bc80a2228d1c5461a9eac329abea8dcffd9869a16c203dc"} Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.097551 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k"] Oct 07 08:08:50 crc kubenswrapper[4875]: W1007 08:08:50.105808 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2570bc7b_9601_4fe3_8fb7_da9277a529c7.slice/crio-27618fe8cbba7f2f56f0508a0477411cf07ee6e255d5a1ab0c4eb0e8bd386f3f WatchSource:0}: Error finding container 27618fe8cbba7f2f56f0508a0477411cf07ee6e255d5a1ab0c4eb0e8bd386f3f: Status 404 returned error can't find the container with id 27618fe8cbba7f2f56f0508a0477411cf07ee6e255d5a1ab0c4eb0e8bd386f3f Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.473061 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.478376 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7-memberlist\") pod \"speaker-xf2p2\" (UID: \"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7\") " pod="metallb-system/speaker-xf2p2" Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.564814 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-xf2p2" Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.775450 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jlztd" event={"ID":"90069e5a-5263-4c20-9c21-caa665096b11","Type":"ContainerStarted","Data":"0e0c9494697816acfd4d7803468d20d1dc634b76cc44c906d5c528e4365d9faa"} Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.775503 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jlztd" event={"ID":"90069e5a-5263-4c20-9c21-caa665096b11","Type":"ContainerStarted","Data":"360c6541d3f07a914898ea04c68856683a75bb7a3332469d93e28485596696de"} Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.775938 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.779474 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" event={"ID":"2570bc7b-9601-4fe3-8fb7-da9277a529c7","Type":"ContainerStarted","Data":"27618fe8cbba7f2f56f0508a0477411cf07ee6e255d5a1ab0c4eb0e8bd386f3f"} Oct 07 08:08:50 crc kubenswrapper[4875]: I1007 08:08:50.781223 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xf2p2" event={"ID":"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7","Type":"ContainerStarted","Data":"c29cee2ed03b9862ccba0e9d1dd08c4edfe734585c848df1aa3ff3302eae3211"} Oct 07 08:08:51 crc kubenswrapper[4875]: I1007 08:08:51.792115 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xf2p2" event={"ID":"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7","Type":"ContainerStarted","Data":"b74e97492c756ed186bcf2148e5767848621d9b9a135290415053e45db484cf2"} Oct 07 08:08:51 crc kubenswrapper[4875]: I1007 08:08:51.792555 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xf2p2" event={"ID":"dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7","Type":"ContainerStarted","Data":"e313c1a24f101e2eb65cd12d5fbf24ce917f625def17249dbb288b9b43d44347"} Oct 07 08:08:51 crc kubenswrapper[4875]: I1007 08:08:51.792586 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-xf2p2" Oct 07 08:08:51 crc kubenswrapper[4875]: I1007 08:08:51.816344 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-xf2p2" podStartSLOduration=3.816326015 podStartE2EDuration="3.816326015s" podCreationTimestamp="2025-10-07 08:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:08:51.814448844 +0000 UTC m=+756.774219387" watchObservedRunningTime="2025-10-07 08:08:51.816326015 +0000 UTC m=+756.776096558" Oct 07 08:08:51 crc kubenswrapper[4875]: I1007 08:08:51.819175 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-jlztd" podStartSLOduration=3.819161337 podStartE2EDuration="3.819161337s" podCreationTimestamp="2025-10-07 08:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:08:50.797398868 +0000 UTC m=+755.757169411" watchObservedRunningTime="2025-10-07 08:08:51.819161337 +0000 UTC m=+756.778931880" Oct 07 08:08:56 crc kubenswrapper[4875]: I1007 08:08:56.828776 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" event={"ID":"2570bc7b-9601-4fe3-8fb7-da9277a529c7","Type":"ContainerStarted","Data":"a881cbef1ca3a22662cb301dfcbe7e0ad4942e7f5a520f9fc7905bb3e3c8dd6c"} Oct 07 08:08:56 crc kubenswrapper[4875]: I1007 08:08:56.829631 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:08:56 crc kubenswrapper[4875]: I1007 08:08:56.830264 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5e85290-9c5d-43cf-b0a8-41c2399d7122" containerID="00e13074c0ec45395c8d0d48a5612ecb5cc1cca37f19271c2b8a5812ffb649ea" exitCode=0 Oct 07 08:08:56 crc kubenswrapper[4875]: I1007 08:08:56.830295 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerDied","Data":"00e13074c0ec45395c8d0d48a5612ecb5cc1cca37f19271c2b8a5812ffb649ea"} Oct 07 08:08:56 crc kubenswrapper[4875]: I1007 08:08:56.857622 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" podStartSLOduration=2.422649778 podStartE2EDuration="8.857597935s" podCreationTimestamp="2025-10-07 08:08:48 +0000 UTC" firstStartedPulling="2025-10-07 08:08:50.108356255 +0000 UTC m=+755.068126798" lastFinishedPulling="2025-10-07 08:08:56.543304412 +0000 UTC m=+761.503074955" observedRunningTime="2025-10-07 08:08:56.853614536 +0000 UTC m=+761.813385089" watchObservedRunningTime="2025-10-07 08:08:56.857597935 +0000 UTC m=+761.817368478" Oct 07 08:08:57 crc kubenswrapper[4875]: I1007 08:08:57.839207 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5e85290-9c5d-43cf-b0a8-41c2399d7122" containerID="8dc35bfd8690be619e86e5a2951835238c43795e816b82a24fee52dbf142286e" exitCode=0 Oct 07 08:08:57 crc kubenswrapper[4875]: I1007 08:08:57.839321 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerDied","Data":"8dc35bfd8690be619e86e5a2951835238c43795e816b82a24fee52dbf142286e"} Oct 07 08:08:58 crc kubenswrapper[4875]: I1007 08:08:58.851510 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5e85290-9c5d-43cf-b0a8-41c2399d7122" containerID="646649fb5139232553627411df37afaad2d8d0c8212678c460da773d7674c1ad" exitCode=0 Oct 07 08:08:58 crc kubenswrapper[4875]: I1007 08:08:58.851663 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerDied","Data":"646649fb5139232553627411df37afaad2d8d0c8212678c460da773d7674c1ad"} Oct 07 08:08:59 crc kubenswrapper[4875]: I1007 08:08:59.091479 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-jlztd" Oct 07 08:08:59 crc kubenswrapper[4875]: I1007 08:08:59.862406 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"c6a8a812f822f4c795fe3be2bdc68d786e2382a01a241d1a0426d02524d45cba"} Oct 07 08:08:59 crc kubenswrapper[4875]: I1007 08:08:59.862885 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"e2627f95711c2f56e8fd1b462261a2cbd0220e3c12ee6f03c9da2ee1155a421a"} Oct 07 08:08:59 crc kubenswrapper[4875]: 
I1007 08:08:59.862913 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"b18b81e4d03b969376122e40ca8b26ad4af713cc12b8218faee84b79686005d9"} Oct 07 08:08:59 crc kubenswrapper[4875]: I1007 08:08:59.862925 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"b274ef98ab6f0f683a31330f7f4df905a7b61d5ed4529cbec5d950e25f27ba1a"} Oct 07 08:08:59 crc kubenswrapper[4875]: I1007 08:08:59.862934 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"35bc79e173038a4a7b4ceafdb56698264d8d8989af1469dde525b125282829f0"} Oct 07 08:09:00 crc kubenswrapper[4875]: I1007 08:09:00.568537 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-xf2p2" Oct 07 08:09:00 crc kubenswrapper[4875]: I1007 08:09:00.875556 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-s9mlf" event={"ID":"a5e85290-9c5d-43cf-b0a8-41c2399d7122","Type":"ContainerStarted","Data":"46fdad0e65eaddd203a6f894cb6c660b81a6ec4e59eed00206010dfd63712079"} Oct 07 08:09:00 crc kubenswrapper[4875]: I1007 08:09:00.875870 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:09:00 crc kubenswrapper[4875]: I1007 08:09:00.912923 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-s9mlf" podStartSLOduration=5.495316422 podStartE2EDuration="12.912855557s" podCreationTimestamp="2025-10-07 08:08:48 +0000 UTC" firstStartedPulling="2025-10-07 08:08:49.102834418 +0000 UTC m=+754.062604971" lastFinishedPulling="2025-10-07 08:08:56.520373523 +0000 UTC m=+761.480144106" observedRunningTime="2025-10-07 08:09:00.910650776 +0000 UTC m=+765.870421369" watchObservedRunningTime="2025-10-07 08:09:00.912855557 +0000 UTC m=+765.872626120" Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.220983 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.221057 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.221108 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.221687 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:09:01 crc 
kubenswrapper[4875]: I1007 08:09:01.221757 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524" gracePeriod=600 Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.883525 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524" exitCode=0 Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.883615 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524"} Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.884063 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4"} Oct 07 08:09:01 crc kubenswrapper[4875]: I1007 08:09:01.884088 4875 scope.go:117] "RemoveContainer" containerID="172c3f05751ea969696e793f007f95178f7ffca7170b2062b57b5a3d73382148" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.452329 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.453941 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.457317 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.461607 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.475068 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.565133 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km4wg\" (UniqueName: \"kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg\") pod \"openstack-operator-index-jbc4d\" (UID: \"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3\") " pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.667583 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km4wg\" (UniqueName: \"kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg\") pod \"openstack-operator-index-jbc4d\" (UID: \"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3\") " pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.700904 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km4wg\" (UniqueName: \"kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg\") pod \"openstack-operator-index-jbc4d\" (UID: 
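The machine-config-daemon entries just above form a complete liveness-probe restart cycle: an HTTP GET against http://127.0.0.1:8798/health is refused, the probe is reported as failed, the running container is killed with its 600s grace period, and a replacement container is started while the previous one is cleaned up via RemoveContainer. The snippet below is only a rough Go sketch of such an HTTP health check, under the assumption that any connection error counts as a probe failure; it is not the kubelet's prober.

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// comparable to the log: "dial tcp 127.0.0.1:8798: connect: connection refused"
		fmt.Println("liveness probe failed:", err)
		fmt.Println("-> the container would be killed (gracePeriod=600) and restarted")
		return
	}
	defer resp.Body.Close()
	fmt.Println("liveness probe succeeded:", resp.Status)
}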
\"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3\") " pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.785388 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.948911 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:09:03 crc kubenswrapper[4875]: I1007 08:09:03.987183 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:09:04 crc kubenswrapper[4875]: I1007 08:09:04.208209 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:04 crc kubenswrapper[4875]: I1007 08:09:04.923655 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jbc4d" event={"ID":"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3","Type":"ContainerStarted","Data":"7d1f143296673a5ee49f68732f62aeea5770a5a699ccce49c01b7f8158b1e49c"} Oct 07 08:09:06 crc kubenswrapper[4875]: I1007 08:09:06.633532 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.241293 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-l2g5l"] Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.244959 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.248269 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-fgwlf" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.248552 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-l2g5l"] Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.318462 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmdlh\" (UniqueName: \"kubernetes.io/projected/a7934996-8cb9-4f65-896e-c9755d8b5712-kube-api-access-wmdlh\") pod \"openstack-operator-index-l2g5l\" (UID: \"a7934996-8cb9-4f65-896e-c9755d8b5712\") " pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.419598 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmdlh\" (UniqueName: \"kubernetes.io/projected/a7934996-8cb9-4f65-896e-c9755d8b5712-kube-api-access-wmdlh\") pod \"openstack-operator-index-l2g5l\" (UID: \"a7934996-8cb9-4f65-896e-c9755d8b5712\") " pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.440396 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmdlh\" (UniqueName: \"kubernetes.io/projected/a7934996-8cb9-4f65-896e-c9755d8b5712-kube-api-access-wmdlh\") pod \"openstack-operator-index-l2g5l\" (UID: \"a7934996-8cb9-4f65-896e-c9755d8b5712\") " pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.563119 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.947294 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jbc4d" event={"ID":"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3","Type":"ContainerStarted","Data":"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a"} Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.949163 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jbc4d" podUID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" containerName="registry-server" containerID="cri-o://785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a" gracePeriod=2 Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.977607 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jbc4d" podStartSLOduration=2.265927554 podStartE2EDuration="4.97755491s" podCreationTimestamp="2025-10-07 08:09:03 +0000 UTC" firstStartedPulling="2025-10-07 08:09:04.218606261 +0000 UTC m=+769.178376824" lastFinishedPulling="2025-10-07 08:09:06.930233637 +0000 UTC m=+771.890004180" observedRunningTime="2025-10-07 08:09:07.971469933 +0000 UTC m=+772.931240476" watchObservedRunningTime="2025-10-07 08:09:07.97755491 +0000 UTC m=+772.937325453" Oct 07 08:09:07 crc kubenswrapper[4875]: I1007 08:09:07.979378 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-l2g5l"] Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.368042 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.430808 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km4wg\" (UniqueName: \"kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg\") pod \"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3\" (UID: \"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3\") " Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.440696 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg" (OuterVolumeSpecName: "kube-api-access-km4wg") pod "f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" (UID: "f9aab30b-8f4b-4914-8c9a-a229e51cc8a3"). InnerVolumeSpecName "kube-api-access-km4wg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.531984 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km4wg\" (UniqueName: \"kubernetes.io/projected/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3-kube-api-access-km4wg\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.952294 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-s9mlf" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.956783 4875 generic.go:334] "Generic (PLEG): container finished" podID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" containerID="785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a" exitCode=0 Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.956853 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jbc4d" event={"ID":"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3","Type":"ContainerDied","Data":"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a"} Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.956909 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jbc4d" event={"ID":"f9aab30b-8f4b-4914-8c9a-a229e51cc8a3","Type":"ContainerDied","Data":"7d1f143296673a5ee49f68732f62aeea5770a5a699ccce49c01b7f8158b1e49c"} Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.956977 4875 scope.go:117] "RemoveContainer" containerID="785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.957097 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jbc4d" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.959907 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l2g5l" event={"ID":"a7934996-8cb9-4f65-896e-c9755d8b5712","Type":"ContainerStarted","Data":"0cbf2b8af085896888c00b8d015a290eb66308cfe8386176d18d3d3bb9070ffa"} Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.959965 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l2g5l" event={"ID":"a7934996-8cb9-4f65-896e-c9755d8b5712","Type":"ContainerStarted","Data":"433cef7a680865b82b6014061e16e91e5d8fff6be8723c1ef8c596e80db83ca7"} Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.978468 4875 scope.go:117] "RemoveContainer" containerID="785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a" Oct 07 08:09:08 crc kubenswrapper[4875]: E1007 08:09:08.979274 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a\": container with ID starting with 785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a not found: ID does not exist" containerID="785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a" Oct 07 08:09:08 crc kubenswrapper[4875]: I1007 08:09:08.979331 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a"} err="failed to get container status \"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a\": rpc error: code = NotFound desc = could not find container 
\"785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a\": container with ID starting with 785c3367c27035dde1e80c895b33f16664ef0a4542c972ee5afb2cb4df6bad7a not found: ID does not exist" Oct 07 08:09:09 crc kubenswrapper[4875]: I1007 08:09:09.006896 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-l2g5l" podStartSLOduration=1.959185895 podStartE2EDuration="2.006859051s" podCreationTimestamp="2025-10-07 08:09:07 +0000 UTC" firstStartedPulling="2025-10-07 08:09:08.056180602 +0000 UTC m=+773.015951145" lastFinishedPulling="2025-10-07 08:09:08.103853748 +0000 UTC m=+773.063624301" observedRunningTime="2025-10-07 08:09:08.998236294 +0000 UTC m=+773.958006837" watchObservedRunningTime="2025-10-07 08:09:09.006859051 +0000 UTC m=+773.966629594" Oct 07 08:09:09 crc kubenswrapper[4875]: I1007 08:09:09.013589 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:09 crc kubenswrapper[4875]: I1007 08:09:09.016869 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jbc4d"] Oct 07 08:09:09 crc kubenswrapper[4875]: I1007 08:09:09.562598 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-hw72k" Oct 07 08:09:09 crc kubenswrapper[4875]: I1007 08:09:09.704950 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" path="/var/lib/kubelet/pods/f9aab30b-8f4b-4914-8c9a-a229e51cc8a3/volumes" Oct 07 08:09:17 crc kubenswrapper[4875]: I1007 08:09:17.563730 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:17 crc kubenswrapper[4875]: I1007 08:09:17.564484 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:17 crc kubenswrapper[4875]: I1007 08:09:17.611863 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:18 crc kubenswrapper[4875]: I1007 08:09:18.071583 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-l2g5l" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.133035 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr"] Oct 07 08:09:25 crc kubenswrapper[4875]: E1007 08:09:25.133977 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" containerName="registry-server" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.133995 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" containerName="registry-server" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.134132 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9aab30b-8f4b-4914-8c9a-a229e51cc8a3" containerName="registry-server" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.134997 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.140532 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-c946r" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.145682 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr"] Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.257116 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.257185 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqst8\" (UniqueName: \"kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.257218 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.359193 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.359277 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqst8\" (UniqueName: \"kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.359304 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.359849 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.361429 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.385117 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqst8\" (UniqueName: \"kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8\") pod \"90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.464920 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:25 crc kubenswrapper[4875]: I1007 08:09:25.910767 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr"] Oct 07 08:09:25 crc kubenswrapper[4875]: W1007 08:09:25.921745 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a54ab75_829f_4945_9c7e_c4566c15b8e2.slice/crio-7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d WatchSource:0}: Error finding container 7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d: Status 404 returned error can't find the container with id 7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d Oct 07 08:09:26 crc kubenswrapper[4875]: I1007 08:09:26.106007 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerStarted","Data":"185e0ece3713740d0841c6977583b014595611a7faa10e61de0375abcf0a4ff0"} Oct 07 08:09:26 crc kubenswrapper[4875]: I1007 08:09:26.106416 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerStarted","Data":"7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d"} Oct 07 08:09:27 crc kubenswrapper[4875]: I1007 08:09:27.115282 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerID="185e0ece3713740d0841c6977583b014595611a7faa10e61de0375abcf0a4ff0" exitCode=0 Oct 07 08:09:27 crc kubenswrapper[4875]: I1007 08:09:27.115340 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerDied","Data":"185e0ece3713740d0841c6977583b014595611a7faa10e61de0375abcf0a4ff0"} Oct 07 08:09:28 crc kubenswrapper[4875]: 
I1007 08:09:28.123212 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerID="8460e9c75c1bf02bfd65294fa591814c153d86e11184076132315b4004c25fb5" exitCode=0 Oct 07 08:09:28 crc kubenswrapper[4875]: I1007 08:09:28.123339 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerDied","Data":"8460e9c75c1bf02bfd65294fa591814c153d86e11184076132315b4004c25fb5"} Oct 07 08:09:29 crc kubenswrapper[4875]: I1007 08:09:29.135441 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerID="49f9edcf2f4d7b1019d6fd8285fb2d02aa6abb0486d1d8a8a449a12c8c6c96d0" exitCode=0 Oct 07 08:09:29 crc kubenswrapper[4875]: I1007 08:09:29.135940 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerDied","Data":"49f9edcf2f4d7b1019d6fd8285fb2d02aa6abb0486d1d8a8a449a12c8c6c96d0"} Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.104228 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.105704 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.128232 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.255087 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.255150 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj6sl\" (UniqueName: \"kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.255244 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.357178 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.356555 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.357287 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj6sl\" (UniqueName: \"kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.357700 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.357987 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.392445 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj6sl\" (UniqueName: \"kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl\") pod \"redhat-operators-cvz92\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.431604 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.495141 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.661648 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqst8\" (UniqueName: \"kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8\") pod \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.661751 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util\") pod \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.661838 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle\") pod \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\" (UID: \"5a54ab75-829f-4945-9c7e-c4566c15b8e2\") " Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.663553 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle" (OuterVolumeSpecName: "bundle") pod "5a54ab75-829f-4945-9c7e-c4566c15b8e2" (UID: "5a54ab75-829f-4945-9c7e-c4566c15b8e2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.682837 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8" (OuterVolumeSpecName: "kube-api-access-fqst8") pod "5a54ab75-829f-4945-9c7e-c4566c15b8e2" (UID: "5a54ab75-829f-4945-9c7e-c4566c15b8e2"). InnerVolumeSpecName "kube-api-access-fqst8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.685720 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util" (OuterVolumeSpecName: "util") pod "5a54ab75-829f-4945-9c7e-c4566c15b8e2" (UID: "5a54ab75-829f-4945-9c7e-c4566c15b8e2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.764217 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqst8\" (UniqueName: \"kubernetes.io/projected/5a54ab75-829f-4945-9c7e-c4566c15b8e2-kube-api-access-fqst8\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.764279 4875 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-util\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.764300 4875 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5a54ab75-829f-4945-9c7e-c4566c15b8e2-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:30 crc kubenswrapper[4875]: I1007 08:09:30.871654 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:30 crc kubenswrapper[4875]: W1007 08:09:30.885385 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdf0fdca_7c30_4b30_ba3a_d996471db50f.slice/crio-4b33679d6839c42bd3b3bd128f14822aedae49cab4e8dc04dfa6f1599969c407 WatchSource:0}: Error finding container 4b33679d6839c42bd3b3bd128f14822aedae49cab4e8dc04dfa6f1599969c407: Status 404 returned error can't find the container with id 4b33679d6839c42bd3b3bd128f14822aedae49cab4e8dc04dfa6f1599969c407 Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.168339 4875 generic.go:334] "Generic (PLEG): container finished" podID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerID="1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb" exitCode=0 Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.168408 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerDied","Data":"1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb"} Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.168479 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerStarted","Data":"4b33679d6839c42bd3b3bd128f14822aedae49cab4e8dc04dfa6f1599969c407"} Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.173160 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" event={"ID":"5a54ab75-829f-4945-9c7e-c4566c15b8e2","Type":"ContainerDied","Data":"7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d"} Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.173208 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a6bc5d29982818adecd0a11f92c2b133d1ce5eccf04d0c991593505b718314d" Oct 07 08:09:31 crc kubenswrapper[4875]: I1007 08:09:31.173319 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr" Oct 07 08:09:32 crc kubenswrapper[4875]: I1007 08:09:32.181985 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerStarted","Data":"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5"} Oct 07 08:09:33 crc kubenswrapper[4875]: I1007 08:09:33.192784 4875 generic.go:334] "Generic (PLEG): container finished" podID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerID="cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5" exitCode=0 Oct 07 08:09:33 crc kubenswrapper[4875]: I1007 08:09:33.192838 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerDied","Data":"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5"} Oct 07 08:09:34 crc kubenswrapper[4875]: I1007 08:09:34.207196 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerStarted","Data":"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb"} Oct 07 08:09:34 crc kubenswrapper[4875]: I1007 08:09:34.240115 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cvz92" podStartSLOduration=1.676108058 podStartE2EDuration="4.24009495s" podCreationTimestamp="2025-10-07 08:09:30 +0000 UTC" firstStartedPulling="2025-10-07 08:09:31.170565245 +0000 UTC m=+796.130335788" lastFinishedPulling="2025-10-07 08:09:33.734552087 +0000 UTC m=+798.694322680" observedRunningTime="2025-10-07 08:09:34.239260194 +0000 UTC m=+799.199030757" watchObservedRunningTime="2025-10-07 08:09:34.24009495 +0000 UTC m=+799.199865493" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.504935 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf"] Oct 07 08:09:36 crc kubenswrapper[4875]: E1007 08:09:36.505686 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="pull" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.505704 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="pull" Oct 07 08:09:36 crc kubenswrapper[4875]: E1007 08:09:36.505731 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="extract" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.505739 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="extract" Oct 07 08:09:36 crc kubenswrapper[4875]: E1007 08:09:36.505748 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="util" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.505757 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="util" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.505924 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a54ab75-829f-4945-9c7e-c4566c15b8e2" containerName="extract" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.506841 4875 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:36 crc kubenswrapper[4875]: W1007 08:09:36.509786 4875 reflector.go:561] object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-dk9zg": failed to list *v1.Secret: secrets "openstack-operator-controller-operator-dockercfg-dk9zg" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Oct 07 08:09:36 crc kubenswrapper[4875]: E1007 08:09:36.509843 4875 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openstack-operator-controller-operator-dockercfg-dk9zg\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openstack-operator-controller-operator-dockercfg-dk9zg\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.541318 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf"] Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.669896 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpf94\" (UniqueName: \"kubernetes.io/projected/4180c049-4ede-4df6-929d-4a3250404f38-kube-api-access-tpf94\") pod \"openstack-operator-controller-operator-fb44c8bf6-4qlwf\" (UID: \"4180c049-4ede-4df6-929d-4a3250404f38\") " pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.773378 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpf94\" (UniqueName: \"kubernetes.io/projected/4180c049-4ede-4df6-929d-4a3250404f38-kube-api-access-tpf94\") pod \"openstack-operator-controller-operator-fb44c8bf6-4qlwf\" (UID: \"4180c049-4ede-4df6-929d-4a3250404f38\") " pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:36 crc kubenswrapper[4875]: I1007 08:09:36.794695 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpf94\" (UniqueName: \"kubernetes.io/projected/4180c049-4ede-4df6-929d-4a3250404f38-kube-api-access-tpf94\") pod \"openstack-operator-controller-operator-fb44c8bf6-4qlwf\" (UID: \"4180c049-4ede-4df6-929d-4a3250404f38\") " pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:37 crc kubenswrapper[4875]: I1007 08:09:37.588073 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-dk9zg" Oct 07 08:09:37 crc kubenswrapper[4875]: I1007 08:09:37.594548 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:38 crc kubenswrapper[4875]: I1007 08:09:38.131858 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf"] Oct 07 08:09:38 crc kubenswrapper[4875]: W1007 08:09:38.136931 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4180c049_4ede_4df6_929d_4a3250404f38.slice/crio-31ae402f2a930c07416f37a8fe518e24585575b85a8c84dbdc30583ecc94ed16 WatchSource:0}: Error finding container 31ae402f2a930c07416f37a8fe518e24585575b85a8c84dbdc30583ecc94ed16: Status 404 returned error can't find the container with id 31ae402f2a930c07416f37a8fe518e24585575b85a8c84dbdc30583ecc94ed16 Oct 07 08:09:38 crc kubenswrapper[4875]: I1007 08:09:38.243203 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" event={"ID":"4180c049-4ede-4df6-929d-4a3250404f38","Type":"ContainerStarted","Data":"31ae402f2a930c07416f37a8fe518e24585575b85a8c84dbdc30583ecc94ed16"} Oct 07 08:09:40 crc kubenswrapper[4875]: I1007 08:09:40.432530 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:40 crc kubenswrapper[4875]: I1007 08:09:40.432610 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:40 crc kubenswrapper[4875]: I1007 08:09:40.483896 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:41 crc kubenswrapper[4875]: I1007 08:09:41.322951 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:42 crc kubenswrapper[4875]: I1007 08:09:42.891232 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:43 crc kubenswrapper[4875]: I1007 08:09:43.285686 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" event={"ID":"4180c049-4ede-4df6-929d-4a3250404f38","Type":"ContainerStarted","Data":"4a9e34f4d6b2e20d5a5b58c76b159aad5920a4969de960b79fa27ce9a54e8379"} Oct 07 08:09:43 crc kubenswrapper[4875]: I1007 08:09:43.285945 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cvz92" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="registry-server" containerID="cri-o://49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb" gracePeriod=2 Oct 07 08:09:44 crc kubenswrapper[4875]: I1007 08:09:44.988084 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.117788 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities\") pod \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.118035 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content\") pod \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.118127 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj6sl\" (UniqueName: \"kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl\") pod \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\" (UID: \"fdf0fdca-7c30-4b30-ba3a-d996471db50f\") " Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.120509 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities" (OuterVolumeSpecName: "utilities") pod "fdf0fdca-7c30-4b30-ba3a-d996471db50f" (UID: "fdf0fdca-7c30-4b30-ba3a-d996471db50f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.126208 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl" (OuterVolumeSpecName: "kube-api-access-kj6sl") pod "fdf0fdca-7c30-4b30-ba3a-d996471db50f" (UID: "fdf0fdca-7c30-4b30-ba3a-d996471db50f"). InnerVolumeSpecName "kube-api-access-kj6sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.219718 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdf0fdca-7c30-4b30-ba3a-d996471db50f" (UID: "fdf0fdca-7c30-4b30-ba3a-d996471db50f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.219756 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.220377 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj6sl\" (UniqueName: \"kubernetes.io/projected/fdf0fdca-7c30-4b30-ba3a-d996471db50f-kube-api-access-kj6sl\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.303779 4875 generic.go:334] "Generic (PLEG): container finished" podID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerID="49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb" exitCode=0 Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.303830 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerDied","Data":"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb"} Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.303866 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvz92" event={"ID":"fdf0fdca-7c30-4b30-ba3a-d996471db50f","Type":"ContainerDied","Data":"4b33679d6839c42bd3b3bd128f14822aedae49cab4e8dc04dfa6f1599969c407"} Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.303866 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvz92" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.304238 4875 scope.go:117] "RemoveContainer" containerID="49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.321804 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdf0fdca-7c30-4b30-ba3a-d996471db50f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.338458 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.344044 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cvz92"] Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.707305 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" path="/var/lib/kubelet/pods/fdf0fdca-7c30-4b30-ba3a-d996471db50f/volumes" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.797290 4875 scope.go:117] "RemoveContainer" containerID="cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.831474 4875 scope.go:117] "RemoveContainer" containerID="1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.856036 4875 scope.go:117] "RemoveContainer" containerID="49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb" Oct 07 08:09:45 crc kubenswrapper[4875]: E1007 08:09:45.856644 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb\": container with ID starting with 
49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb not found: ID does not exist" containerID="49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.856717 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb"} err="failed to get container status \"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb\": rpc error: code = NotFound desc = could not find container \"49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb\": container with ID starting with 49166626255b32ae2bdba9b52679c9cb12b11c5f2b5572373d7013da9bbf33bb not found: ID does not exist" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.856749 4875 scope.go:117] "RemoveContainer" containerID="cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5" Oct 07 08:09:45 crc kubenswrapper[4875]: E1007 08:09:45.857131 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5\": container with ID starting with cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5 not found: ID does not exist" containerID="cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.857162 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5"} err="failed to get container status \"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5\": rpc error: code = NotFound desc = could not find container \"cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5\": container with ID starting with cd1880e5be55a1cad64c308fa333b91115089ba0f57d92fb0c848831adba12e5 not found: ID does not exist" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.857178 4875 scope.go:117] "RemoveContainer" containerID="1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb" Oct 07 08:09:45 crc kubenswrapper[4875]: E1007 08:09:45.857736 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb\": container with ID starting with 1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb not found: ID does not exist" containerID="1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb" Oct 07 08:09:45 crc kubenswrapper[4875]: I1007 08:09:45.857759 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb"} err="failed to get container status \"1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb\": rpc error: code = NotFound desc = could not find container \"1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb\": container with ID starting with 1c3a81b40f7dc3e707d1aa5552f33af56911ecce7d78c8723fc068e4f51d25bb not found: ID does not exist" Oct 07 08:09:46 crc kubenswrapper[4875]: I1007 08:09:46.334688 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" 
event={"ID":"4180c049-4ede-4df6-929d-4a3250404f38","Type":"ContainerStarted","Data":"8bf3a258b95a33df55bc0a7864be41528d74c946ebf3bac8e313133e3d9eada2"} Oct 07 08:09:46 crc kubenswrapper[4875]: I1007 08:09:46.335241 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:09:46 crc kubenswrapper[4875]: I1007 08:09:46.374508 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" podStartSLOduration=2.659635804 podStartE2EDuration="10.374466133s" podCreationTimestamp="2025-10-07 08:09:36 +0000 UTC" firstStartedPulling="2025-10-07 08:09:38.142924771 +0000 UTC m=+803.102695314" lastFinishedPulling="2025-10-07 08:09:45.8577551 +0000 UTC m=+810.817525643" observedRunningTime="2025-10-07 08:09:46.369325992 +0000 UTC m=+811.329096555" watchObservedRunningTime="2025-10-07 08:09:46.374466133 +0000 UTC m=+811.334236676" Oct 07 08:09:47 crc kubenswrapper[4875]: I1007 08:09:47.349500 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-fb44c8bf6-4qlwf" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.525790 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:13 crc kubenswrapper[4875]: E1007 08:10:13.527045 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="extract-utilities" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.527066 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="extract-utilities" Oct 07 08:10:13 crc kubenswrapper[4875]: E1007 08:10:13.527083 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="extract-content" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.527091 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="extract-content" Oct 07 08:10:13 crc kubenswrapper[4875]: E1007 08:10:13.527106 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="registry-server" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.527115 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="registry-server" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.527271 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdf0fdca-7c30-4b30-ba3a-d996471db50f" containerName="registry-server" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.528173 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.546666 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.582515 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.582698 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.582728 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2zvd\" (UniqueName: \"kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.684320 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.684395 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.684436 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2zvd\" (UniqueName: \"kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.685047 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.685072 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.710023 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l2zvd\" (UniqueName: \"kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd\") pod \"certified-operators-4qtlx\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:13 crc kubenswrapper[4875]: I1007 08:10:13.902647 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:14 crc kubenswrapper[4875]: I1007 08:10:14.419801 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:14 crc kubenswrapper[4875]: I1007 08:10:14.553076 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerStarted","Data":"b6599e60d7b9a067227eabbba272eaf3235e52abe7dba1159a2c73453b9c6e50"} Oct 07 08:10:15 crc kubenswrapper[4875]: I1007 08:10:15.567578 4875 generic.go:334] "Generic (PLEG): container finished" podID="90a5c426-84c6-4d3c-a161-a78990842890" containerID="cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e" exitCode=0 Oct 07 08:10:15 crc kubenswrapper[4875]: I1007 08:10:15.568036 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerDied","Data":"cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e"} Oct 07 08:10:17 crc kubenswrapper[4875]: I1007 08:10:17.629299 4875 generic.go:334] "Generic (PLEG): container finished" podID="90a5c426-84c6-4d3c-a161-a78990842890" containerID="a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48" exitCode=0 Oct 07 08:10:17 crc kubenswrapper[4875]: I1007 08:10:17.629613 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerDied","Data":"a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48"} Oct 07 08:10:18 crc kubenswrapper[4875]: I1007 08:10:18.642862 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerStarted","Data":"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6"} Oct 07 08:10:18 crc kubenswrapper[4875]: I1007 08:10:18.670758 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4qtlx" podStartSLOduration=3.110340782 podStartE2EDuration="5.6707339s" podCreationTimestamp="2025-10-07 08:10:13 +0000 UTC" firstStartedPulling="2025-10-07 08:10:15.570429686 +0000 UTC m=+840.530200229" lastFinishedPulling="2025-10-07 08:10:18.130822804 +0000 UTC m=+843.090593347" observedRunningTime="2025-10-07 08:10:18.667166993 +0000 UTC m=+843.626937546" watchObservedRunningTime="2025-10-07 08:10:18.6707339 +0000 UTC m=+843.630504443" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.874763 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.877223 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.879478 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ltpnv" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.893624 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.903485 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.904708 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.907496 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-sqzqb" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.908456 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.917862 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.919081 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.923636 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlmqf\" (UniqueName: \"kubernetes.io/projected/cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5-kube-api-access-tlmqf\") pod \"barbican-operator-controller-manager-58c4cd55f4-tslhs\" (UID: \"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.924174 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-4d7g5" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.928477 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.956368 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.957650 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.960234 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-5pkks" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.977427 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.988903 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.998141 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6"] Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.999203 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:10:22 crc kubenswrapper[4875]: I1007 08:10:22.999708 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.008848 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-jj89k" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.009158 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-jjs6j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.024841 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nm2m\" (UniqueName: \"kubernetes.io/projected/0c4de86e-6c46-4292-8c13-faeff0997ac4-kube-api-access-7nm2m\") pod \"heat-operator-controller-manager-54b4974c45-t97rc\" (UID: \"0c4de86e-6c46-4292-8c13-faeff0997ac4\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.024911 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhtd5\" (UniqueName: \"kubernetes.io/projected/b9617ed2-0ac9-45b8-8089-1091ff8937dd-kube-api-access-jhtd5\") pod \"glance-operator-controller-manager-5dc44df7d5-zpbdj\" (UID: \"b9617ed2-0ac9-45b8-8089-1091ff8937dd\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.024945 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7f65\" (UniqueName: \"kubernetes.io/projected/ecbab85b-669d-4669-aa95-597dc630b7e6-kube-api-access-x7f65\") pod \"cinder-operator-controller-manager-7d4d4f8d-8chkp\" (UID: \"ecbab85b-669d-4669-aa95-597dc630b7e6\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.024970 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hktxv\" (UniqueName: \"kubernetes.io/projected/0ef97224-fe40-43a3-af95-f4e1986b8fbe-kube-api-access-hktxv\") pod \"designate-operator-controller-manager-75dfd9b554-n9gbs\" (UID: 
\"0ef97224-fe40-43a3-af95-f4e1986b8fbe\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.024999 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlmqf\" (UniqueName: \"kubernetes.io/projected/cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5-kube-api-access-tlmqf\") pod \"barbican-operator-controller-manager-58c4cd55f4-tslhs\" (UID: \"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.025029 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqp26\" (UniqueName: \"kubernetes.io/projected/52379554-390c-4cb2-97ae-0cb0596f36d1-kube-api-access-sqp26\") pod \"horizon-operator-controller-manager-76d5b87f47-g6cn6\" (UID: \"52379554-390c-4cb2-97ae-0cb0596f36d1\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.032947 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.034290 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.037086 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9wwml" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.042000 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.043590 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.079151 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.115238 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.123482 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlmqf\" (UniqueName: \"kubernetes.io/projected/cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5-kube-api-access-tlmqf\") pod \"barbican-operator-controller-manager-58c4cd55f4-tslhs\" (UID: \"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5\") " pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136489 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nm2m\" (UniqueName: \"kubernetes.io/projected/0c4de86e-6c46-4292-8c13-faeff0997ac4-kube-api-access-7nm2m\") pod \"heat-operator-controller-manager-54b4974c45-t97rc\" (UID: \"0c4de86e-6c46-4292-8c13-faeff0997ac4\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136617 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhtd5\" (UniqueName: 
\"kubernetes.io/projected/b9617ed2-0ac9-45b8-8089-1091ff8937dd-kube-api-access-jhtd5\") pod \"glance-operator-controller-manager-5dc44df7d5-zpbdj\" (UID: \"b9617ed2-0ac9-45b8-8089-1091ff8937dd\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136685 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7f65\" (UniqueName: \"kubernetes.io/projected/ecbab85b-669d-4669-aa95-597dc630b7e6-kube-api-access-x7f65\") pod \"cinder-operator-controller-manager-7d4d4f8d-8chkp\" (UID: \"ecbab85b-669d-4669-aa95-597dc630b7e6\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136764 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hktxv\" (UniqueName: \"kubernetes.io/projected/0ef97224-fe40-43a3-af95-f4e1986b8fbe-kube-api-access-hktxv\") pod \"designate-operator-controller-manager-75dfd9b554-n9gbs\" (UID: \"0ef97224-fe40-43a3-af95-f4e1986b8fbe\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136823 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqp26\" (UniqueName: \"kubernetes.io/projected/52379554-390c-4cb2-97ae-0cb0596f36d1-kube-api-access-sqp26\") pod \"horizon-operator-controller-manager-76d5b87f47-g6cn6\" (UID: \"52379554-390c-4cb2-97ae-0cb0596f36d1\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.136860 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0cce3a76-3617-40be-8d2b-b8f9184e6b61-cert\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.137145 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqwjf\" (UniqueName: \"kubernetes.io/projected/0cce3a76-3617-40be-8d2b-b8f9184e6b61-kube-api-access-bqwjf\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.166999 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhtd5\" (UniqueName: \"kubernetes.io/projected/b9617ed2-0ac9-45b8-8089-1091ff8937dd-kube-api-access-jhtd5\") pod \"glance-operator-controller-manager-5dc44df7d5-zpbdj\" (UID: \"b9617ed2-0ac9-45b8-8089-1091ff8937dd\") " pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.167442 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nm2m\" (UniqueName: \"kubernetes.io/projected/0c4de86e-6c46-4292-8c13-faeff0997ac4-kube-api-access-7nm2m\") pod \"heat-operator-controller-manager-54b4974c45-t97rc\" (UID: \"0c4de86e-6c46-4292-8c13-faeff0997ac4\") " pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.175538 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7f65\" (UniqueName: \"kubernetes.io/projected/ecbab85b-669d-4669-aa95-597dc630b7e6-kube-api-access-x7f65\") pod \"cinder-operator-controller-manager-7d4d4f8d-8chkp\" (UID: \"ecbab85b-669d-4669-aa95-597dc630b7e6\") " pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.175695 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-6vp52"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.177611 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqp26\" (UniqueName: \"kubernetes.io/projected/52379554-390c-4cb2-97ae-0cb0596f36d1-kube-api-access-sqp26\") pod \"horizon-operator-controller-manager-76d5b87f47-g6cn6\" (UID: \"52379554-390c-4cb2-97ae-0cb0596f36d1\") " pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.177873 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.179731 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.190719 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.203613 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-58f5g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.205335 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-nkx52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.205972 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.207722 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.209328 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hktxv\" (UniqueName: \"kubernetes.io/projected/0ef97224-fe40-43a3-af95-f4e1986b8fbe-kube-api-access-hktxv\") pod \"designate-operator-controller-manager-75dfd9b554-n9gbs\" (UID: \"0ef97224-fe40-43a3-af95-f4e1986b8fbe\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.211072 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.212859 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-9dxkj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.222843 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.237630 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.239598 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqwjf\" (UniqueName: \"kubernetes.io/projected/0cce3a76-3617-40be-8d2b-b8f9184e6b61-kube-api-access-bqwjf\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.239654 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqdmk\" (UniqueName: \"kubernetes.io/projected/ab712745-89ea-43bb-b2d7-7192d3691acf-kube-api-access-pqdmk\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-lvqtd\" (UID: \"ab712745-89ea-43bb-b2d7-7192d3691acf\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.239681 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-264qm\" (UniqueName: \"kubernetes.io/projected/45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8-kube-api-access-264qm\") pod \"manila-operator-controller-manager-65d89cfd9f-zwhfp\" (UID: \"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.239726 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0cce3a76-3617-40be-8d2b-b8f9184e6b61-cert\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.239757 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t997v\" (UniqueName: \"kubernetes.io/projected/f03c0528-11ac-4b93-8f46-4415192ba694-kube-api-access-t997v\") pod \"ironic-operator-controller-manager-649675d675-6vp52\" (UID: \"f03c0528-11ac-4b93-8f46-4415192ba694\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.246922 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-6vp52"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.254435 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.255705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0cce3a76-3617-40be-8d2b-b8f9184e6b61-cert\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.267955 4875 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.269299 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.274016 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-m988v" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.276379 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.279117 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.285267 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqwjf\" (UniqueName: \"kubernetes.io/projected/0cce3a76-3617-40be-8d2b-b8f9184e6b61-kube-api-access-bqwjf\") pod \"infra-operator-controller-manager-658588b8c9-l77jd\" (UID: \"0cce3a76-3617-40be-8d2b-b8f9184e6b61\") " pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.292898 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.294367 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.313347 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-kv5b7" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.313572 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.331392 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.336417 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.337580 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.338669 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.341467 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqdmk\" (UniqueName: \"kubernetes.io/projected/ab712745-89ea-43bb-b2d7-7192d3691acf-kube-api-access-pqdmk\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-lvqtd\" (UID: \"ab712745-89ea-43bb-b2d7-7192d3691acf\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.341500 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-264qm\" (UniqueName: \"kubernetes.io/projected/45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8-kube-api-access-264qm\") pod \"manila-operator-controller-manager-65d89cfd9f-zwhfp\" (UID: \"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.341630 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t997v\" (UniqueName: \"kubernetes.io/projected/f03c0528-11ac-4b93-8f46-4415192ba694-kube-api-access-t997v\") pod \"ironic-operator-controller-manager-649675d675-6vp52\" (UID: \"f03c0528-11ac-4b93-8f46-4415192ba694\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.341677 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkblm\" (UniqueName: \"kubernetes.io/projected/d9b65818-b46c-4a3d-8ed2-53d04e3dc834-kube-api-access-wkblm\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j\" (UID: \"d9b65818-b46c-4a3d-8ed2-53d04e3dc834\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.341710 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwk24\" (UniqueName: \"kubernetes.io/projected/04963c19-bc7b-41c9-8b48-e3b8653738a8-kube-api-access-nwk24\") pod \"neutron-operator-controller-manager-8d984cc4d-jcb62\" (UID: \"04963c19-bc7b-41c9-8b48-e3b8653738a8\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.346318 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-2q4hr" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.349494 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.350466 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.350746 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.362101 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.362178 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.364001 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-zqh5c" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.381784 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.398798 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-264qm\" (UniqueName: \"kubernetes.io/projected/45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8-kube-api-access-264qm\") pod \"manila-operator-controller-manager-65d89cfd9f-zwhfp\" (UID: \"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8\") " pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.407969 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.408512 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqdmk\" (UniqueName: \"kubernetes.io/projected/ab712745-89ea-43bb-b2d7-7192d3691acf-kube-api-access-pqdmk\") pod \"keystone-operator-controller-manager-7b5ccf6d9c-lvqtd\" (UID: \"ab712745-89ea-43bb-b2d7-7192d3691acf\") " pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.411449 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.414946 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.415630 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-2jbbj" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.428752 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.430161 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.437450 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-fg2nq" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.442755 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443213 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkblm\" (UniqueName: \"kubernetes.io/projected/d9b65818-b46c-4a3d-8ed2-53d04e3dc834-kube-api-access-wkblm\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j\" (UID: \"d9b65818-b46c-4a3d-8ed2-53d04e3dc834\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443278 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zxwl\" (UniqueName: \"kubernetes.io/projected/f5fb651c-9cca-4f7c-9136-37534358a8dd-kube-api-access-6zxwl\") pod \"ovn-operator-controller-manager-6d8b6f9b9-dtxrv\" (UID: \"f5fb651c-9cca-4f7c-9136-37534358a8dd\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443313 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwk24\" (UniqueName: \"kubernetes.io/projected/04963c19-bc7b-41c9-8b48-e3b8653738a8-kube-api-access-nwk24\") pod \"neutron-operator-controller-manager-8d984cc4d-jcb62\" (UID: \"04963c19-bc7b-41c9-8b48-e3b8653738a8\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443386 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm855\" (UniqueName: \"kubernetes.io/projected/d848105d-5f25-435d-bc92-fc6f9eac9749-kube-api-access-nm855\") pod \"octavia-operator-controller-manager-7468f855d8-wr55g\" (UID: \"d848105d-5f25-435d-bc92-fc6f9eac9749\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443415 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf6zz\" (UniqueName: \"kubernetes.io/projected/7b5bb2f7-a322-4af4-81dc-d9b104b2bb85-kube-api-access-wf6zz\") pod \"nova-operator-controller-manager-7c7fc454ff-2s4ks\" (UID: \"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443459 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.443489 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zj9v\" (UniqueName: 
\"kubernetes.io/projected/052c6076-1098-41ac-a80d-11879a2e08bc-kube-api-access-4zj9v\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.457702 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.481597 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t997v\" (UniqueName: \"kubernetes.io/projected/f03c0528-11ac-4b93-8f46-4415192ba694-kube-api-access-t997v\") pod \"ironic-operator-controller-manager-649675d675-6vp52\" (UID: \"f03c0528-11ac-4b93-8f46-4415192ba694\") " pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.493327 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkblm\" (UniqueName: \"kubernetes.io/projected/d9b65818-b46c-4a3d-8ed2-53d04e3dc834-kube-api-access-wkblm\") pod \"mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j\" (UID: \"d9b65818-b46c-4a3d-8ed2-53d04e3dc834\") " pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.495254 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwk24\" (UniqueName: \"kubernetes.io/projected/04963c19-bc7b-41c9-8b48-e3b8653738a8-kube-api-access-nwk24\") pod \"neutron-operator-controller-manager-8d984cc4d-jcb62\" (UID: \"04963c19-bc7b-41c9-8b48-e3b8653738a8\") " pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.508924 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.510720 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.528419 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-8ftht" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.545835 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.550241 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554144 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554206 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zj9v\" (UniqueName: \"kubernetes.io/projected/052c6076-1098-41ac-a80d-11879a2e08bc-kube-api-access-4zj9v\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554262 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zxwl\" (UniqueName: \"kubernetes.io/projected/f5fb651c-9cca-4f7c-9136-37534358a8dd-kube-api-access-6zxwl\") pod \"ovn-operator-controller-manager-6d8b6f9b9-dtxrv\" (UID: \"f5fb651c-9cca-4f7c-9136-37534358a8dd\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554339 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvdrx\" (UniqueName: \"kubernetes.io/projected/9e09310b-4437-4e30-881f-ed2dd568aa16-kube-api-access-vvdrx\") pod \"placement-operator-controller-manager-54689d9f88-8b99x\" (UID: \"9e09310b-4437-4e30-881f-ed2dd568aa16\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554364 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm855\" (UniqueName: \"kubernetes.io/projected/d848105d-5f25-435d-bc92-fc6f9eac9749-kube-api-access-nm855\") pod \"octavia-operator-controller-manager-7468f855d8-wr55g\" (UID: \"d848105d-5f25-435d-bc92-fc6f9eac9749\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.554399 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf6zz\" (UniqueName: \"kubernetes.io/projected/7b5bb2f7-a322-4af4-81dc-d9b104b2bb85-kube-api-access-wf6zz\") pod \"nova-operator-controller-manager-7c7fc454ff-2s4ks\" (UID: \"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:23 crc kubenswrapper[4875]: E1007 08:10:23.554907 4875 
secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 08:10:23 crc kubenswrapper[4875]: E1007 08:10:23.554984 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert podName:052c6076-1098-41ac-a80d-11879a2e08bc nodeName:}" failed. No retries permitted until 2025-10-07 08:10:24.054953842 +0000 UTC m=+849.014724385 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" (UID: "052c6076-1098-41ac-a80d-11879a2e08bc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.567428 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.576502 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.578429 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2979k" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.590448 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.594922 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.608662 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.609814 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zj9v\" (UniqueName: \"kubernetes.io/projected/052c6076-1098-41ac-a80d-11879a2e08bc-kube-api-access-4zj9v\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.614232 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm855\" (UniqueName: \"kubernetes.io/projected/d848105d-5f25-435d-bc92-fc6f9eac9749-kube-api-access-nm855\") pod \"octavia-operator-controller-manager-7468f855d8-wr55g\" (UID: \"d848105d-5f25-435d-bc92-fc6f9eac9749\") " pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.619031 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xmkhl" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.620022 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zxwl\" (UniqueName: \"kubernetes.io/projected/f5fb651c-9cca-4f7c-9136-37534358a8dd-kube-api-access-6zxwl\") pod \"ovn-operator-controller-manager-6d8b6f9b9-dtxrv\" (UID: \"f5fb651c-9cca-4f7c-9136-37534358a8dd\") " pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.630754 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.635052 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.657769 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvdrx\" (UniqueName: \"kubernetes.io/projected/9e09310b-4437-4e30-881f-ed2dd568aa16-kube-api-access-vvdrx\") pod \"placement-operator-controller-manager-54689d9f88-8b99x\" (UID: \"9e09310b-4437-4e30-881f-ed2dd568aa16\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.658270 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.672594 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf6zz\" (UniqueName: \"kubernetes.io/projected/7b5bb2f7-a322-4af4-81dc-d9b104b2bb85-kube-api-access-wf6zz\") pod \"nova-operator-controller-manager-7c7fc454ff-2s4ks\" (UID: \"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85\") " pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.723255 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.725797 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.740783 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvdrx\" (UniqueName: \"kubernetes.io/projected/9e09310b-4437-4e30-881f-ed2dd568aa16-kube-api-access-vvdrx\") pod \"placement-operator-controller-manager-54689d9f88-8b99x\" (UID: \"9e09310b-4437-4e30-881f-ed2dd568aa16\") " pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.755255 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.768391 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.776953 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrwvx\" (UniqueName: \"kubernetes.io/projected/4aab0800-b79f-42f6-8d27-ce34e631f086-kube-api-access-mrwvx\") pod \"telemetry-operator-controller-manager-5d4d74dd89-z98x2\" (UID: \"4aab0800-b79f-42f6-8d27-ce34e631f086\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.777148 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48cj7\" (UniqueName: \"kubernetes.io/projected/91189044-d565-4a5e-9766-1bd11f300f11-kube-api-access-48cj7\") pod \"swift-operator-controller-manager-6859f9b676-gfr8f\" (UID: \"91189044-d565-4a5e-9766-1bd11f300f11\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.781921 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-vgf8x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.799317 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.827143 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.855097 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.882094 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.883177 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrwvx\" (UniqueName: \"kubernetes.io/projected/4aab0800-b79f-42f6-8d27-ce34e631f086-kube-api-access-mrwvx\") pod \"telemetry-operator-controller-manager-5d4d74dd89-z98x2\" (UID: \"4aab0800-b79f-42f6-8d27-ce34e631f086\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.883236 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48cj7\" (UniqueName: \"kubernetes.io/projected/91189044-d565-4a5e-9766-1bd11f300f11-kube-api-access-48cj7\") pod \"swift-operator-controller-manager-6859f9b676-gfr8f\" (UID: \"91189044-d565-4a5e-9766-1bd11f300f11\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.883263 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc2w2\" (UniqueName: \"kubernetes.io/projected/c57705e3-3b4e-4252-8c8d-0a21084ff5d8-kube-api-access-zc2w2\") pod \"test-operator-controller-manager-5cd5cb47d7-l8gnr\" (UID: \"c57705e3-3b4e-4252-8c8d-0a21084ff5d8\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.904608 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.906148 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.906698 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.906735 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.911223 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.918653 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48cj7\" (UniqueName: \"kubernetes.io/projected/91189044-d565-4a5e-9766-1bd11f300f11-kube-api-access-48cj7\") pod \"swift-operator-controller-manager-6859f9b676-gfr8f\" (UID: \"91189044-d565-4a5e-9766-1bd11f300f11\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.933856 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-97hc8" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.940662 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrwvx\" (UniqueName: \"kubernetes.io/projected/4aab0800-b79f-42f6-8d27-ce34e631f086-kube-api-access-mrwvx\") pod \"telemetry-operator-controller-manager-5d4d74dd89-z98x2\" (UID: \"4aab0800-b79f-42f6-8d27-ce34e631f086\") " pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.968342 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.983458 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql"] Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.985373 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.990261 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.990558 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-sg45t" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.991067 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2xf7\" (UniqueName: \"kubernetes.io/projected/d4352aef-6eec-4342-8b1f-67a0bf3459f2-kube-api-access-l2xf7\") pod \"watcher-operator-controller-manager-6cbc6dd547-9njzw\" (UID: \"d4352aef-6eec-4342-8b1f-67a0bf3459f2\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.991217 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc2w2\" (UniqueName: \"kubernetes.io/projected/c57705e3-3b4e-4252-8c8d-0a21084ff5d8-kube-api-access-zc2w2\") pod \"test-operator-controller-manager-5cd5cb47d7-l8gnr\" (UID: \"c57705e3-3b4e-4252-8c8d-0a21084ff5d8\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:10:23 crc kubenswrapper[4875]: I1007 08:10:23.999657 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.012153 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.025382 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc2w2\" (UniqueName: \"kubernetes.io/projected/c57705e3-3b4e-4252-8c8d-0a21084ff5d8-kube-api-access-zc2w2\") pod \"test-operator-controller-manager-5cd5cb47d7-l8gnr\" (UID: \"c57705e3-3b4e-4252-8c8d-0a21084ff5d8\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.039857 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.042429 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.046395 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.051401 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.052318 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-nmrmb" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.093586 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2xf7\" (UniqueName: \"kubernetes.io/projected/d4352aef-6eec-4342-8b1f-67a0bf3459f2-kube-api-access-l2xf7\") pod \"watcher-operator-controller-manager-6cbc6dd547-9njzw\" (UID: \"d4352aef-6eec-4342-8b1f-67a0bf3459f2\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.093642 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.093940 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.093998 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42kqr\" (UniqueName: \"kubernetes.io/projected/1f9b6e86-8f57-4622-bc91-b062f04ec29f-kube-api-access-42kqr\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: E1007 08:10:24.100531 4875 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 08:10:24 crc kubenswrapper[4875]: E1007 08:10:24.100599 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert podName:052c6076-1098-41ac-a80d-11879a2e08bc nodeName:}" failed. No retries permitted until 2025-10-07 08:10:25.10058086 +0000 UTC m=+850.060351403 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert") pod "openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" (UID: "052c6076-1098-41ac-a80d-11879a2e08bc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.109369 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.113080 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.120976 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2xf7\" (UniqueName: \"kubernetes.io/projected/d4352aef-6eec-4342-8b1f-67a0bf3459f2-kube-api-access-l2xf7\") pod \"watcher-operator-controller-manager-6cbc6dd547-9njzw\" (UID: \"d4352aef-6eec-4342-8b1f-67a0bf3459f2\") " pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.136399 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.198356 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42kqr\" (UniqueName: \"kubernetes.io/projected/1f9b6e86-8f57-4622-bc91-b062f04ec29f-kube-api-access-42kqr\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.200784 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpjw7\" (UniqueName: \"kubernetes.io/projected/1b1c1a64-b0a5-4c2f-a43d-7cde7774094a-kube-api-access-bpjw7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rljtx\" (UID: \"1b1c1a64-b0a5-4c2f-a43d-7cde7774094a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.200829 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: E1007 08:10:24.201106 4875 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 07 08:10:24 crc kubenswrapper[4875]: E1007 08:10:24.201170 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert podName:1f9b6e86-8f57-4622-bc91-b062f04ec29f nodeName:}" failed. No retries permitted until 2025-10-07 08:10:24.701149232 +0000 UTC m=+849.660919775 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert") pod "openstack-operator-controller-manager-667f8c4c67-cltql" (UID: "1f9b6e86-8f57-4622-bc91-b062f04ec29f") : secret "webhook-server-cert" not found Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.254432 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.255786 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42kqr\" (UniqueName: \"kubernetes.io/projected/1f9b6e86-8f57-4622-bc91-b062f04ec29f-kube-api-access-42kqr\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.266947 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.273264 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.304692 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpjw7\" (UniqueName: \"kubernetes.io/projected/1b1c1a64-b0a5-4c2f-a43d-7cde7774094a-kube-api-access-bpjw7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rljtx\" (UID: \"1b1c1a64-b0a5-4c2f-a43d-7cde7774094a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.339478 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpjw7\" (UniqueName: \"kubernetes.io/projected/1b1c1a64-b0a5-4c2f-a43d-7cde7774094a-kube-api-access-bpjw7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rljtx\" (UID: \"1b1c1a64-b0a5-4c2f-a43d-7cde7774094a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.343642 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.390148 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.671221 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.718824 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.731842 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f9b6e86-8f57-4622-bc91-b062f04ec29f-cert\") pod \"openstack-operator-controller-manager-667f8c4c67-cltql\" (UID: \"1f9b6e86-8f57-4622-bc91-b062f04ec29f\") " pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: W1007 08:10:24.743504 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9617ed2_0ac9_45b8_8089_1091ff8937dd.slice/crio-1e092eca91ad5092a671d496098c6c1970efa5fd144f6c40656c8e9e1acd8b25 WatchSource:0}: Error finding container 1e092eca91ad5092a671d496098c6c1970efa5fd144f6c40656c8e9e1acd8b25: Status 404 returned error can't find the container with id 1e092eca91ad5092a671d496098c6c1970efa5fd144f6c40656c8e9e1acd8b25 Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.759871 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.781320 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" event={"ID":"0cce3a76-3617-40be-8d2b-b8f9184e6b61","Type":"ContainerStarted","Data":"82877db5afdbefc94c246528caefe0bad14fd7578a8756fff8d117b1e6d891cb"} Oct 07 08:10:24 crc kubenswrapper[4875]: W1007 08:10:24.788385 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c4de86e_6c46_4292_8c13_faeff0997ac4.slice/crio-80654a79fc2df22a22c8a59e201048ee17b3d86940e1e715ed24e63c617964a6 WatchSource:0}: Error finding container 80654a79fc2df22a22c8a59e201048ee17b3d86940e1e715ed24e63c617964a6: Status 404 returned error can't find the container with id 80654a79fc2df22a22c8a59e201048ee17b3d86940e1e715ed24e63c617964a6 Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.799275 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" event={"ID":"ecbab85b-669d-4669-aa95-597dc630b7e6","Type":"ContainerStarted","Data":"0f63f9a143d71d7b663f9afbddbbcdf891fbf8cfa704160f63f8a9a72e82e3e6"} Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.800762 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6"] Oct 07 08:10:24 crc kubenswrapper[4875]: W1007 08:10:24.815828 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52379554_390c_4cb2_97ae_0cb0596f36d1.slice/crio-1f4c62b0963deaf67f598839dc57daa1078ccd4c20333332d9e1fd7438552187 WatchSource:0}: Error finding container 1f4c62b0963deaf67f598839dc57daa1078ccd4c20333332d9e1fd7438552187: Status 404 returned error can't find the container with id 1f4c62b0963deaf67f598839dc57daa1078ccd4c20333332d9e1fd7438552187 Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.816083 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" event={"ID":"0ef97224-fe40-43a3-af95-f4e1986b8fbe","Type":"ContainerStarted","Data":"1f1572c13365d2efaaeb5b1af5bc3d1d76f2f5957be311ee48fe2ea137d7f098"} Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.832113 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" event={"ID":"b9617ed2-0ac9-45b8-8089-1091ff8937dd","Type":"ContainerStarted","Data":"1e092eca91ad5092a671d496098c6c1970efa5fd144f6c40656c8e9e1acd8b25"} Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.848270 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" event={"ID":"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5","Type":"ContainerStarted","Data":"133ccaa74ebb479ce917c3d1bea734c820136c7a391f6254ef9435ac4fef08c1"} Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.926736 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-649675d675-6vp52"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.927630 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.929929 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j"] Oct 07 08:10:24 crc kubenswrapper[4875]: I1007 08:10:24.945679 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:24 crc kubenswrapper[4875]: W1007 08:10:24.946300 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9b65818_b46c_4a3d_8ed2_53d04e3dc834.slice/crio-44cf59632e52c0f2a3147a91d0ff98696ac3bbf873838d1a720513749b154111 WatchSource:0}: Error finding container 44cf59632e52c0f2a3147a91d0ff98696ac3bbf873838d1a720513749b154111: Status 404 returned error can't find the container with id 44cf59632e52c0f2a3147a91d0ff98696ac3bbf873838d1a720513749b154111 Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.138930 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: \"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.159075 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052c6076-1098-41ac-a80d-11879a2e08bc-cert\") pod \"openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t\" (UID: 
\"052c6076-1098-41ac-a80d-11879a2e08bc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.188674 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.208958 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.279603 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.282257 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd"] Oct 07 08:10:25 crc kubenswrapper[4875]: W1007 08:10:25.302350 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab712745_89ea_43bb_b2d7_7192d3691acf.slice/crio-4057a490f585f35d3a702823364315bb9213a286159bd3871e93db2136421aff WatchSource:0}: Error finding container 4057a490f585f35d3a702823364315bb9213a286159bd3871e93db2136421aff: Status 404 returned error can't find the container with id 4057a490f585f35d3a702823364315bb9213a286159bd3871e93db2136421aff Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.305074 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.408413 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.642461 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.657954 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.664042 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.668812 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr"] Oct 07 08:10:25 crc kubenswrapper[4875]: W1007 08:10:25.674586 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e09310b_4437_4e30_881f_ed2dd568aa16.slice/crio-0bb592375d62f06537416b37007b274194311ac2f6e18b3312210df9e809683a WatchSource:0}: Error finding container 0bb592375d62f06537416b37007b274194311ac2f6e18b3312210df9e809683a: Status 404 returned error can't find the container with id 0bb592375d62f06537416b37007b274194311ac2f6e18b3312210df9e809683a Oct 07 08:10:25 crc kubenswrapper[4875]: W1007 08:10:25.675486 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc57705e3_3b4e_4252_8c8d_0a21084ff5d8.slice/crio-f69af88725e517cd6ae0cae54c745ab3254fdbe4fd45ba2b0e9e0eee7eea50b1 WatchSource:0}: Error finding 
container f69af88725e517cd6ae0cae54c745ab3254fdbe4fd45ba2b0e9e0eee7eea50b1: Status 404 returned error can't find the container with id f69af88725e517cd6ae0cae54c745ab3254fdbe4fd45ba2b0e9e0eee7eea50b1 Oct 07 08:10:25 crc kubenswrapper[4875]: W1007 08:10:25.683216 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aab0800_b79f_42f6_8d27_ce34e631f086.slice/crio-02768c5593bfb56558189c1562d406498432e6d53f5d9596432bed745f29f9d5 WatchSource:0}: Error finding container 02768c5593bfb56558189c1562d406498432e6d53f5d9596432bed745f29f9d5: Status 404 returned error can't find the container with id 02768c5593bfb56558189c1562d406498432e6d53f5d9596432bed745f29f9d5 Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.687271 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.693455 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2"] Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.703797 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l2xf7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
watcher-operator-controller-manager-6cbc6dd547-9njzw_openstack-operators(d4352aef-6eec-4342-8b1f-67a0bf3459f2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.703944 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-48cj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-gfr8f_openstack-operators(91189044-d565-4a5e-9766-1bd11f300f11): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.704374 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nwk24,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-8d984cc4d-jcb62_openstack-operators(04963c19-bc7b-41c9-8b48-e3b8653738a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.723050 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bpjw7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-rljtx_openstack-operators(1b1c1a64-b0a5-4c2f-a43d-7cde7774094a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.724463 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" podUID="1b1c1a64-b0a5-4c2f-a43d-7cde7774094a" Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.726147 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx"] Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.733101 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql"] Oct 07 08:10:25 crc kubenswrapper[4875]: W1007 08:10:25.803956 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f9b6e86_8f57_4622_bc91_b062f04ec29f.slice/crio-c070a3be8a68f86c30dae7156e8398986cfa821b404a9e65ef6236f1f3245ea7 WatchSource:0}: Error finding container c070a3be8a68f86c30dae7156e8398986cfa821b404a9e65ef6236f1f3245ea7: Status 404 returned error can't find the container with id c070a3be8a68f86c30dae7156e8398986cfa821b404a9e65ef6236f1f3245ea7 Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.883130 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" event={"ID":"0c4de86e-6c46-4292-8c13-faeff0997ac4","Type":"ContainerStarted","Data":"80654a79fc2df22a22c8a59e201048ee17b3d86940e1e715ed24e63c617964a6"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.893663 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" event={"ID":"04963c19-bc7b-41c9-8b48-e3b8653738a8","Type":"ContainerStarted","Data":"f256fb0b99ea2807bc2b2b8267e3f303f3f1d25419c042e82e870449d3c79b9d"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.897302 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" event={"ID":"91189044-d565-4a5e-9766-1bd11f300f11","Type":"ContainerStarted","Data":"11cb76d49ffecac50b93e77746ab53780e936edad76800ba85a9b74f798370dc"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.904473 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" event={"ID":"d848105d-5f25-435d-bc92-fc6f9eac9749","Type":"ContainerStarted","Data":"f31d4b4335d635b7dbc7fdc0a2cf74e6931266061f2733cd85223b54927d8827"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.910732 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" event={"ID":"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8","Type":"ContainerStarted","Data":"b65f440155af76c050181637b3e60dce5d329f632ad995045cbec57d568b9378"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.913450 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" event={"ID":"1b1c1a64-b0a5-4c2f-a43d-7cde7774094a","Type":"ContainerStarted","Data":"019ff066e578819a2dea54dc764aadbc7f2b3aca047b7ce9771de120ac3765e1"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.915945 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" event={"ID":"d9b65818-b46c-4a3d-8ed2-53d04e3dc834","Type":"ContainerStarted","Data":"44cf59632e52c0f2a3147a91d0ff98696ac3bbf873838d1a720513749b154111"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.917827 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" event={"ID":"1f9b6e86-8f57-4622-bc91-b062f04ec29f","Type":"ContainerStarted","Data":"c070a3be8a68f86c30dae7156e8398986cfa821b404a9e65ef6236f1f3245ea7"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.920995 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" event={"ID":"52379554-390c-4cb2-97ae-0cb0596f36d1","Type":"ContainerStarted","Data":"1f4c62b0963deaf67f598839dc57daa1078ccd4c20333332d9e1fd7438552187"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.925859 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" event={"ID":"f03c0528-11ac-4b93-8f46-4415192ba694","Type":"ContainerStarted","Data":"a8e37250becf9f0104aa3a79d1b21a6f5a1c6b693479aad45fa1c61b2e36be46"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.928577 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" event={"ID":"c57705e3-3b4e-4252-8c8d-0a21084ff5d8","Type":"ContainerStarted","Data":"f69af88725e517cd6ae0cae54c745ab3254fdbe4fd45ba2b0e9e0eee7eea50b1"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.932433 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t"] Oct 07 08:10:25 crc kubenswrapper[4875]: E1007 08:10:25.946084 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" podUID="1b1c1a64-b0a5-4c2f-a43d-7cde7774094a" Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.947930 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" 
event={"ID":"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85","Type":"ContainerStarted","Data":"8acf46fc384b538b0b7c327eb6f4951c093d771405b7a5639d6340f24ed73a89"} Oct 07 08:10:25 crc kubenswrapper[4875]: I1007 08:10:25.991575 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" event={"ID":"9e09310b-4437-4e30-881f-ed2dd568aa16","Type":"ContainerStarted","Data":"0bb592375d62f06537416b37007b274194311ac2f6e18b3312210df9e809683a"} Oct 07 08:10:26 crc kubenswrapper[4875]: I1007 08:10:26.018302 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" event={"ID":"4aab0800-b79f-42f6-8d27-ce34e631f086","Type":"ContainerStarted","Data":"02768c5593bfb56558189c1562d406498432e6d53f5d9596432bed745f29f9d5"} Oct 07 08:10:26 crc kubenswrapper[4875]: I1007 08:10:26.040221 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" event={"ID":"d4352aef-6eec-4342-8b1f-67a0bf3459f2","Type":"ContainerStarted","Data":"8522424f1b3b21e92dc7afec8b9c5ea663bdf1d501cfb4ac634c487909432396"} Oct 07 08:10:26 crc kubenswrapper[4875]: I1007 08:10:26.046074 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" event={"ID":"ab712745-89ea-43bb-b2d7-7192d3691acf","Type":"ContainerStarted","Data":"4057a490f585f35d3a702823364315bb9213a286159bd3871e93db2136421aff"} Oct 07 08:10:26 crc kubenswrapper[4875]: I1007 08:10:26.055781 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" event={"ID":"f5fb651c-9cca-4f7c-9136-37534358a8dd","Type":"ContainerStarted","Data":"05e6f2119e459880faaf4d9e1921582ac00ff216f14206841ad14b692fa4ad02"} Oct 07 08:10:26 crc kubenswrapper[4875]: E1007 08:10:26.136802 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" podUID="91189044-d565-4a5e-9766-1bd11f300f11" Oct 07 08:10:26 crc kubenswrapper[4875]: E1007 08:10:26.179230 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" podUID="d4352aef-6eec-4342-8b1f-67a0bf3459f2" Oct 07 08:10:26 crc kubenswrapper[4875]: E1007 08:10:26.257529 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" podUID="04963c19-bc7b-41c9-8b48-e3b8653738a8" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.091867 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.133847 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" event={"ID":"052c6076-1098-41ac-a80d-11879a2e08bc","Type":"ContainerStarted","Data":"2e191eedb736a66b2b50cd600561422906dbfbaa96c035b646620d09e7774926"} Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.137167 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" event={"ID":"91189044-d565-4a5e-9766-1bd11f300f11","Type":"ContainerStarted","Data":"ae59a6f92299605e465a32beb601fc7484633b6ee0e855148d8e988a9e70c081"} Oct 07 08:10:27 crc kubenswrapper[4875]: E1007 08:10:27.139097 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" podUID="91189044-d565-4a5e-9766-1bd11f300f11" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.142593 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" event={"ID":"d4352aef-6eec-4342-8b1f-67a0bf3459f2","Type":"ContainerStarted","Data":"50c00f464e136621f701ed23312c898d875718da6dccf07eb2b7eb2a8f4681dd"} Oct 07 08:10:27 crc kubenswrapper[4875]: E1007 08:10:27.148056 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" podUID="d4352aef-6eec-4342-8b1f-67a0bf3459f2" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.179488 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" event={"ID":"1f9b6e86-8f57-4622-bc91-b062f04ec29f","Type":"ContainerStarted","Data":"5189d801fbed1be51e68e9baa62dbdb327ec7e6e52074d3e40728027855ad28e"} Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.179560 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" event={"ID":"1f9b6e86-8f57-4622-bc91-b062f04ec29f","Type":"ContainerStarted","Data":"d66ea754d52d63c057c8c4a021590799fe0d0e0390e01db651c09528485fd2a1"} Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.179723 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.191751 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" event={"ID":"04963c19-bc7b-41c9-8b48-e3b8653738a8","Type":"ContainerStarted","Data":"d7ed07694c8abc4592dd9b5ffa666218c5d5360e0121180ecd8036aa5efc426e"} Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.191898 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4qtlx" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="registry-server" containerID="cri-o://309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6" gracePeriod=2 Oct 07 08:10:27 crc kubenswrapper[4875]: E1007 08:10:27.201741 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862\\\"\"" 
pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" podUID="04963c19-bc7b-41c9-8b48-e3b8653738a8" Oct 07 08:10:27 crc kubenswrapper[4875]: E1007 08:10:27.205909 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" podUID="1b1c1a64-b0a5-4c2f-a43d-7cde7774094a" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.303087 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" podStartSLOduration=4.303061743 podStartE2EDuration="4.303061743s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:10:27.292132324 +0000 UTC m=+852.251902857" watchObservedRunningTime="2025-10-07 08:10:27.303061743 +0000 UTC m=+852.262832286" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.784218 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.820694 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2zvd\" (UniqueName: \"kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd\") pod \"90a5c426-84c6-4d3c-a161-a78990842890\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.821926 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content\") pod \"90a5c426-84c6-4d3c-a161-a78990842890\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.822088 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities\") pod \"90a5c426-84c6-4d3c-a161-a78990842890\" (UID: \"90a5c426-84c6-4d3c-a161-a78990842890\") " Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.826645 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities" (OuterVolumeSpecName: "utilities") pod "90a5c426-84c6-4d3c-a161-a78990842890" (UID: "90a5c426-84c6-4d3c-a161-a78990842890"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.850567 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd" (OuterVolumeSpecName: "kube-api-access-l2zvd") pod "90a5c426-84c6-4d3c-a161-a78990842890" (UID: "90a5c426-84c6-4d3c-a161-a78990842890"). InnerVolumeSpecName "kube-api-access-l2zvd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.926928 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:10:27 crc kubenswrapper[4875]: I1007 08:10:27.926998 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2zvd\" (UniqueName: \"kubernetes.io/projected/90a5c426-84c6-4d3c-a161-a78990842890-kube-api-access-l2zvd\") on node \"crc\" DevicePath \"\"" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.087017 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90a5c426-84c6-4d3c-a161-a78990842890" (UID: "90a5c426-84c6-4d3c-a161-a78990842890"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.140436 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a5c426-84c6-4d3c-a161-a78990842890-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.214210 4875 generic.go:334] "Generic (PLEG): container finished" podID="90a5c426-84c6-4d3c-a161-a78990842890" containerID="309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6" exitCode=0 Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.214345 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qtlx" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.214368 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerDied","Data":"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6"} Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.214456 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qtlx" event={"ID":"90a5c426-84c6-4d3c-a161-a78990842890","Type":"ContainerDied","Data":"b6599e60d7b9a067227eabbba272eaf3235e52abe7dba1159a2c73453b9c6e50"} Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.214499 4875 scope.go:117] "RemoveContainer" containerID="309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6" Oct 07 08:10:28 crc kubenswrapper[4875]: E1007 08:10:28.217214 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" podUID="d4352aef-6eec-4342-8b1f-67a0bf3459f2" Oct 07 08:10:28 crc kubenswrapper[4875]: E1007 08:10:28.217863 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:dfd044635f9df9ed1d249387fa622177db35cdc72475e1c570617b8d17c64862\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" podUID="04963c19-bc7b-41c9-8b48-e3b8653738a8" Oct 07 08:10:28 crc 
kubenswrapper[4875]: E1007 08:10:28.217860 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" podUID="91189044-d565-4a5e-9766-1bd11f300f11" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.310169 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.317697 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4qtlx"] Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.510572 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:10:28 crc kubenswrapper[4875]: E1007 08:10:28.510931 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="extract-content" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.510946 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="extract-content" Oct 07 08:10:28 crc kubenswrapper[4875]: E1007 08:10:28.510975 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="extract-utilities" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.510982 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="extract-utilities" Oct 07 08:10:28 crc kubenswrapper[4875]: E1007 08:10:28.510988 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="registry-server" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.510995 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="registry-server" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.511134 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="90a5c426-84c6-4d3c-a161-a78990842890" containerName="registry-server" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.512726 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.519496 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.554153 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.554695 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt84q\" (UniqueName: \"kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.554826 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.657299 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.657397 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt84q\" (UniqueName: \"kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.657578 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.658213 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.658925 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.685553 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tt84q\" (UniqueName: \"kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q\") pod \"redhat-marketplace-hvlwz\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:28 crc kubenswrapper[4875]: I1007 08:10:28.842431 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:29 crc kubenswrapper[4875]: I1007 08:10:29.710970 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90a5c426-84c6-4d3c-a161-a78990842890" path="/var/lib/kubelet/pods/90a5c426-84c6-4d3c-a161-a78990842890/volumes" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.102032 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.104052 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.116645 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.117187 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbgj2\" (UniqueName: \"kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.117292 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.117321 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.218584 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.218641 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.219282 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities\") pod 
\"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.219343 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.219434 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbgj2\" (UniqueName: \"kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.239282 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbgj2\" (UniqueName: \"kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2\") pod \"community-operators-rxtrt\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:32 crc kubenswrapper[4875]: I1007 08:10:32.461784 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:34 crc kubenswrapper[4875]: I1007 08:10:34.935671 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-667f8c4c67-cltql" Oct 07 08:10:39 crc kubenswrapper[4875]: E1007 08:10:39.241709 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:da5c3078d80878d66c616e6f8a0bb909f95d971cde2c612f96fded064113e182" Oct 07 08:10:39 crc kubenswrapper[4875]: E1007 08:10:39.242680 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:da5c3078d80878d66c616e6f8a0bb909f95d971cde2c612f96fded064113e182,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nm855,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-7468f855d8-wr55g_openstack-operators(d848105d-5f25-435d-bc92-fc6f9eac9749): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:39 crc kubenswrapper[4875]: E1007 08:10:39.620229 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb" Oct 07 08:10:39 crc kubenswrapper[4875]: E1007 08:10:39.620505 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zc2w2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd5cb47d7-l8gnr_openstack-operators(c57705e3-3b4e-4252-8c8d-0a21084ff5d8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:40 crc kubenswrapper[4875]: E1007 08:10:40.064769 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1" Oct 07 08:10:40 crc kubenswrapper[4875]: E1007 08:10:40.065025 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vvdrx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-54689d9f88-8b99x_openstack-operators(9e09310b-4437-4e30-881f-ed2dd568aa16): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:40 crc kubenswrapper[4875]: E1007 08:10:40.679682 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f" Oct 07 08:10:40 crc kubenswrapper[4875]: E1007 08:10:40.680239 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bqwjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-658588b8c9-l77jd_openstack-operators(0cce3a76-3617-40be-8d2b-b8f9184e6b61): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:41 crc kubenswrapper[4875]: E1007 08:10:41.075229 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:063aae1458289d1090a77c74c2b978b9eb978b0e4062c399f0cb5434a8dd2757" Oct 07 08:10:41 crc kubenswrapper[4875]: E1007 08:10:41.075557 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:063aae1458289d1090a77c74c2b978b9eb978b0e4062c399f0cb5434a8dd2757,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-264qm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-65d89cfd9f-zwhfp_openstack-operators(45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:41 crc kubenswrapper[4875]: E1007 08:10:41.533283 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:a6f1dcab931fd4b818010607ede65150742563b3c81a3ad3d739ef7953cace0b" Oct 07 08:10:41 crc kubenswrapper[4875]: E1007 08:10:41.533863 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:a6f1dcab931fd4b818010607ede65150742563b3c81a3ad3d739ef7953cace0b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pqdmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7b5ccf6d9c-lvqtd_openstack-operators(ab712745-89ea-43bb-b2d7-7192d3691acf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:42 crc kubenswrapper[4875]: E1007 08:10:42.082243 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e" Oct 07 08:10:42 crc kubenswrapper[4875]: E1007 08:10:42.082510 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mrwvx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5d4d74dd89-z98x2_openstack-operators(4aab0800-b79f-42f6-8d27-ce34e631f086): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:42 crc kubenswrapper[4875]: E1007 08:10:42.619689 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09" Oct 07 08:10:42 crc kubenswrapper[4875]: E1007 08:10:42.619946 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6zxwl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-6d8b6f9b9-dtxrv_openstack-operators(f5fb651c-9cca-4f7c-9136-37534358a8dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:43 crc kubenswrapper[4875]: E1007 08:10:43.053822 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:b6ab8fc3ad425eca2e073fe9ba9d5b29d9ea4d9814de7bb799fa330209566cd4" Oct 07 08:10:43 crc kubenswrapper[4875]: E1007 08:10:43.054110 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:b6ab8fc3ad425eca2e073fe9ba9d5b29d9ea4d9814de7bb799fa330209566cd4,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sqp26,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-76d5b87f47-g6cn6_openstack-operators(52379554-390c-4cb2-97ae-0cb0596f36d1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:10:43 crc kubenswrapper[4875]: I1007 08:10:43.637438 4875 scope.go:117] "RemoveContainer" containerID="a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.406313 4875 scope.go:117] "RemoveContainer" containerID="cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.725714 4875 scope.go:117] "RemoveContainer" containerID="309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.726861 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6\": container with ID starting with 309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6 not found: ID does not exist" containerID="309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.726988 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6"} err="failed to get container status \"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6\": rpc error: code = NotFound desc = could not find container \"309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6\": container with ID starting with 309f4340b4a300593d103859c961934691a5d9e3e4146558a05e42adfb6201c6 not found: ID does not exist" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.727020 4875 scope.go:117] "RemoveContainer" containerID="a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.727404 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48\": container with ID starting with a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48 not found: ID does not exist" containerID="a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.727461 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48"} err="failed to get container status 
\"a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48\": rpc error: code = NotFound desc = could not find container \"a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48\": container with ID starting with a61e3376bea0790df3254ee563a621cc7ec7688ea4723e948a1fb0ba42e88f48 not found: ID does not exist" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.727493 4875 scope.go:117] "RemoveContainer" containerID="cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.727717 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e\": container with ID starting with cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e not found: ID does not exist" containerID="cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.727735 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e"} err="failed to get container status \"cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e\": rpc error: code = NotFound desc = could not find container \"cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e\": container with ID starting with cbdf5d950fbc0bbbc25cc503922a43c635066653179d0d679f6e089e2190767e not found: ID does not exist" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.733698 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" podUID="d848105d-5f25-435d-bc92-fc6f9eac9749" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.742096 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" podUID="9e09310b-4437-4e30-881f-ed2dd568aa16" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.778448 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" podUID="ab712745-89ea-43bb-b2d7-7192d3691acf" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.797409 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:10:45 crc kubenswrapper[4875]: W1007 08:10:45.840188 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1ea5bc6_a184_4e5f_9033_80d78eb736a9.slice/crio-f6109359d710263272e944cd162478a8aa69e2d690a2df652113a3b3164ae600 WatchSource:0}: Error finding container f6109359d710263272e944cd162478a8aa69e2d690a2df652113a3b3164ae600: Status 404 returned error can't find the container with id f6109359d710263272e944cd162478a8aa69e2d690a2df652113a3b3164ae600 Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.905956 4875 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" podUID="52379554-390c-4cb2-97ae-0cb0596f36d1" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.906230 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" podUID="c57705e3-3b4e-4252-8c8d-0a21084ff5d8" Oct 07 08:10:45 crc kubenswrapper[4875]: I1007 08:10:45.954300 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.988042 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" podUID="4aab0800-b79f-42f6-8d27-ce34e631f086" Oct 07 08:10:45 crc kubenswrapper[4875]: E1007 08:10:45.995265 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" podUID="0cce3a76-3617-40be-8d2b-b8f9184e6b61" Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.027781 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" podUID="f5fb651c-9cca-4f7c-9136-37534358a8dd" Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.133318 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" podUID="45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.428470 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" event={"ID":"0ef97224-fe40-43a3-af95-f4e1986b8fbe","Type":"ContainerStarted","Data":"c46b95482dff69a08dd877648b701b70ac5b66a377d355a3719a0696b0cc3cf0"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.442087 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" event={"ID":"f5fb651c-9cca-4f7c-9136-37534358a8dd","Type":"ContainerStarted","Data":"cc3585493e96f1463e9718f0c59ea5d7a43047cfc01d1db95e1d5d875cb12b36"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.450057 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" podUID="f5fb651c-9cca-4f7c-9136-37534358a8dd" Oct 07 08:10:46 crc 
kubenswrapper[4875]: I1007 08:10:46.457955 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" event={"ID":"ecbab85b-669d-4669-aa95-597dc630b7e6","Type":"ContainerStarted","Data":"2db409cf8c972ec710bb0f25e57e9d30357a2d1474b041b91ac54a539eb2906a"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.467738 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerStarted","Data":"81ca85165d6f8af4712011669a050045fd78c88a2dc46a428d57e8ede0c4b5a0"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.521019 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" event={"ID":"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8","Type":"ContainerStarted","Data":"f64acec8bc771e5b3c6169c0468dc091f5ab18d03e2a2739a2e37b5d6593249f"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.523589 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:063aae1458289d1090a77c74c2b978b9eb978b0e4062c399f0cb5434a8dd2757\\\"\"" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" podUID="45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.532911 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" event={"ID":"9e09310b-4437-4e30-881f-ed2dd568aa16","Type":"ContainerStarted","Data":"f5e2143c4a3790fe35173418c278affd86bda497cc9dc603da27fa77f42ff045"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.535855 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1\\\"\"" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" podUID="9e09310b-4437-4e30-881f-ed2dd568aa16" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.542490 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerStarted","Data":"f6109359d710263272e944cd162478a8aa69e2d690a2df652113a3b3164ae600"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.563865 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" event={"ID":"52379554-390c-4cb2-97ae-0cb0596f36d1","Type":"ContainerStarted","Data":"7c17acb2d80a7fc9068786241c87bb3263bc8a3589b8461a85f55a85014600ca"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.567250 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:b6ab8fc3ad425eca2e073fe9ba9d5b29d9ea4d9814de7bb799fa330209566cd4\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" podUID="52379554-390c-4cb2-97ae-0cb0596f36d1" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.577323 4875 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" event={"ID":"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5","Type":"ContainerStarted","Data":"1cdd3c6ead6323010a9baf0ba638d08e0c05ab86824a5f7709d6ed3fa683f78c"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.624325 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" event={"ID":"1b1c1a64-b0a5-4c2f-a43d-7cde7774094a","Type":"ContainerStarted","Data":"fdfe1a73306df10bd2886fd534e3ff7bc60fc99c105af38771cf22512afb550d"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.653631 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" event={"ID":"d9b65818-b46c-4a3d-8ed2-53d04e3dc834","Type":"ContainerStarted","Data":"89ac1db5f89892a3bf2e5a247da2a4234c2330cff2352e37c263b5205f01b855"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.664267 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" event={"ID":"0c4de86e-6c46-4292-8c13-faeff0997ac4","Type":"ContainerStarted","Data":"12b3b34ef83f340c9933a99e81ab87fec5a267a0ff49f33a322d279784976fce"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.665347 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rljtx" podStartSLOduration=3.855896021 podStartE2EDuration="23.665323242s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.722798593 +0000 UTC m=+850.682569136" lastFinishedPulling="2025-10-07 08:10:45.532225804 +0000 UTC m=+870.491996357" observedRunningTime="2025-10-07 08:10:46.66491896 +0000 UTC m=+871.624689503" watchObservedRunningTime="2025-10-07 08:10:46.665323242 +0000 UTC m=+871.625093785" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.704107 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" event={"ID":"c57705e3-3b4e-4252-8c8d-0a21084ff5d8","Type":"ContainerStarted","Data":"82753a9f95305ce5108cbebce2aff0cd585ff3768b3250733ecbd23660272a38"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.713801 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" podUID="c57705e3-3b4e-4252-8c8d-0a21084ff5d8" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.720530 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" event={"ID":"052c6076-1098-41ac-a80d-11879a2e08bc","Type":"ContainerStarted","Data":"20e8402b1f9615d73a82da1be631d1d9e0278696f9e84a1792676a279d92d7a1"} Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.727520 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" event={"ID":"d848105d-5f25-435d-bc92-fc6f9eac9749","Type":"ContainerStarted","Data":"3de38e97798bb8968b060875094e33463a69d739c7a271fef0f170efc588db79"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.735542 4875 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:da5c3078d80878d66c616e6f8a0bb909f95d971cde2c612f96fded064113e182\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" podUID="d848105d-5f25-435d-bc92-fc6f9eac9749" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.761254 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" event={"ID":"ab712745-89ea-43bb-b2d7-7192d3691acf","Type":"ContainerStarted","Data":"46a2cc1fcd86dae9f557301c9c698b5b3f812b912b0af7173b9660d79356de2a"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.764214 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:a6f1dcab931fd4b818010607ede65150742563b3c81a3ad3d739ef7953cace0b\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" podUID="ab712745-89ea-43bb-b2d7-7192d3691acf" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.774020 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" event={"ID":"4aab0800-b79f-42f6-8d27-ce34e631f086","Type":"ContainerStarted","Data":"d3c16bdcfab269d1d88fd126b759bb3d909a1d5ded5a23cfa152320d0f541570"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.826596 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" podUID="4aab0800-b79f-42f6-8d27-ce34e631f086" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.831622 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" event={"ID":"0cce3a76-3617-40be-8d2b-b8f9184e6b61","Type":"ContainerStarted","Data":"8836097cd97fac03b90d055a8fdcc96a5093515ccee678f112d133ec4abacf0d"} Oct 07 08:10:46 crc kubenswrapper[4875]: E1007 08:10:46.837640 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" podUID="0cce3a76-3617-40be-8d2b-b8f9184e6b61" Oct 07 08:10:46 crc kubenswrapper[4875]: I1007 08:10:46.871825 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" event={"ID":"b9617ed2-0ac9-45b8-8089-1091ff8937dd","Type":"ContainerStarted","Data":"838a80092f32bb266b27311f47bab2b22166d6766cdf9e84884903ac44faac3b"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.882112 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" event={"ID":"0c4de86e-6c46-4292-8c13-faeff0997ac4","Type":"ContainerStarted","Data":"7516900de7452d158d469e0b1f661e4af3e95fab09b700e0be6bf3a4668c8cbc"} Oct 
07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.883645 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.883922 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.884200 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" event={"ID":"cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5","Type":"ContainerStarted","Data":"fed2e0410414057cbe952512271bf766f1a15a90c35a837e02f7b1a2c36873bf"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.885730 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" event={"ID":"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85","Type":"ContainerStarted","Data":"0d7b16a1f95ba55fa5b189c031d995e300d4d4d25c94ef28d916711b2cb37335"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.885812 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.885826 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" event={"ID":"7b5bb2f7-a322-4af4-81dc-d9b104b2bb85","Type":"ContainerStarted","Data":"63547fa464717b099ebd5ea8340fa1bcf8d8e12f0c56549afda211d406947856"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.887660 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" event={"ID":"f03c0528-11ac-4b93-8f46-4415192ba694","Type":"ContainerStarted","Data":"93cd4962429be0f8a6dd70d359ad4fea9d877ad4cf9872ed9d7d089e8c267b86"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.887757 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" event={"ID":"f03c0528-11ac-4b93-8f46-4415192ba694","Type":"ContainerStarted","Data":"fb963970affbd3b3adcbababe601d29ee5aec4ee37c54876a213e2ffd908c9a9"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.888937 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.890081 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" event={"ID":"0ef97224-fe40-43a3-af95-f4e1986b8fbe","Type":"ContainerStarted","Data":"92b99a701339a3dbe46849bdd0364508217bab0cfa6d4ac183fb06cbbcd0aa61"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.890853 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.894506 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" event={"ID":"ecbab85b-669d-4669-aa95-597dc630b7e6","Type":"ContainerStarted","Data":"7005ca9062c603cd071f459c194762fc347df3ea831ee2b99162072ec6e3c9e2"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.894671 4875 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.907106 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" event={"ID":"04963c19-bc7b-41c9-8b48-e3b8653738a8","Type":"ContainerStarted","Data":"e0faa6038c028ff49f9ae87570dd4f01689d75753de87eff922c52a653c6fc6e"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.907825 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.921363 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" podStartSLOduration=7.128878314 podStartE2EDuration="25.921339876s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.811385358 +0000 UTC m=+849.771155901" lastFinishedPulling="2025-10-07 08:10:43.60384692 +0000 UTC m=+868.563617463" observedRunningTime="2025-10-07 08:10:47.915077357 +0000 UTC m=+872.874847920" watchObservedRunningTime="2025-10-07 08:10:47.921339876 +0000 UTC m=+872.881110419" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.921556 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" event={"ID":"d4352aef-6eec-4342-8b1f-67a0bf3459f2","Type":"ContainerStarted","Data":"eb7cd04b8bd5ffc771c9ac2cef6e6605eb898acc284c1692d4dcabd60bb942e0"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.922646 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.943097 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" podStartSLOduration=6.472254038 podStartE2EDuration="25.943066921s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.132806481 +0000 UTC m=+849.092577024" lastFinishedPulling="2025-10-07 08:10:43.603619364 +0000 UTC m=+868.563389907" observedRunningTime="2025-10-07 08:10:47.93904075 +0000 UTC m=+872.898811313" watchObservedRunningTime="2025-10-07 08:10:47.943066921 +0000 UTC m=+872.902837464" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.943468 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" event={"ID":"b9617ed2-0ac9-45b8-8089-1091ff8937dd","Type":"ContainerStarted","Data":"d3564a72051cd5b838edf2c5f2dca6d76304235a62b15fbd118355539a175bc6"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.953868 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.955400 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" event={"ID":"052c6076-1098-41ac-a80d-11879a2e08bc","Type":"ContainerStarted","Data":"347da562cddfe3a3d4d43284ba69f31cb66abf50947cbc95d7812512f2c53f8d"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.956028 4875 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.962721 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" event={"ID":"91189044-d565-4a5e-9766-1bd11f300f11","Type":"ContainerStarted","Data":"74d60f3385f93d52b009244e671e6a54564789603566cf850fed135748564482"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.963017 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.964115 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" podStartSLOduration=5.79703216 podStartE2EDuration="24.964094485s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.704014918 +0000 UTC m=+850.663785461" lastFinishedPulling="2025-10-07 08:10:44.871077243 +0000 UTC m=+869.830847786" observedRunningTime="2025-10-07 08:10:47.961100485 +0000 UTC m=+872.920871048" watchObservedRunningTime="2025-10-07 08:10:47.964094485 +0000 UTC m=+872.923865048" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.970856 4875 generic.go:334] "Generic (PLEG): container finished" podID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerID="835ef2a6c276da855d64223aaba0636a2a1e02d14a039c5b5b2aad719d72ceea" exitCode=0 Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.970979 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerDied","Data":"835ef2a6c276da855d64223aaba0636a2a1e02d14a039c5b5b2aad719d72ceea"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.990111 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" event={"ID":"d9b65818-b46c-4a3d-8ed2-53d04e3dc834","Type":"ContainerStarted","Data":"c0214cdf7a9138a67bf4b99952ee23430c6fd58d3145e565dd66f9bca7c1bfb9"} Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.991080 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:47 crc kubenswrapper[4875]: I1007 08:10:47.998101 4875 generic.go:334] "Generic (PLEG): container finished" podID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerID="bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91" exitCode=0 Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.000477 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerDied","Data":"bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91"} Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011554 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1\\\"\"" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" 
podUID="9e09310b-4437-4e30-881f-ed2dd568aa16" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011631 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:f37e29d1f621c23c0d77b09076006d1e8002a77c2ff3d9b8921f893221cb1d09\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" podUID="f5fb651c-9cca-4f7c-9136-37534358a8dd" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011684 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:063aae1458289d1090a77c74c2b978b9eb978b0e4062c399f0cb5434a8dd2757\\\"\"" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" podUID="45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011738 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" podUID="4aab0800-b79f-42f6-8d27-ce34e631f086" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011788 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:b6ab8fc3ad425eca2e073fe9ba9d5b29d9ea4d9814de7bb799fa330209566cd4\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" podUID="52379554-390c-4cb2-97ae-0cb0596f36d1" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011839 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" podUID="0cce3a76-3617-40be-8d2b-b8f9184e6b61" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011912 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:da5c3078d80878d66c616e6f8a0bb909f95d971cde2c612f96fded064113e182\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" podUID="d848105d-5f25-435d-bc92-fc6f9eac9749" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.011963 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:a6f1dcab931fd4b818010607ede65150742563b3c81a3ad3d739ef7953cace0b\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" podUID="ab712745-89ea-43bb-b2d7-7192d3691acf" Oct 07 08:10:48 crc kubenswrapper[4875]: E1007 08:10:48.012023 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" podUID="c57705e3-3b4e-4252-8c8d-0a21084ff5d8" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.018347 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" podStartSLOduration=6.64550374 podStartE2EDuration="25.01832068s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.232137393 +0000 UTC m=+850.191907936" lastFinishedPulling="2025-10-07 08:10:43.604954333 +0000 UTC m=+868.564724876" observedRunningTime="2025-10-07 08:10:48.006243286 +0000 UTC m=+872.966013839" watchObservedRunningTime="2025-10-07 08:10:48.01832068 +0000 UTC m=+872.978091213" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.053957 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" podStartSLOduration=6.861467834 podStartE2EDuration="26.053931844s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.359200057 +0000 UTC m=+849.318970600" lastFinishedPulling="2025-10-07 08:10:43.551664067 +0000 UTC m=+868.511434610" observedRunningTime="2025-10-07 08:10:48.050468879 +0000 UTC m=+873.010239422" watchObservedRunningTime="2025-10-07 08:10:48.053931844 +0000 UTC m=+873.013702387" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.154430 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" podStartSLOduration=5.957992942 podStartE2EDuration="25.154408992s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.946523832 +0000 UTC m=+849.906294375" lastFinishedPulling="2025-10-07 08:10:44.142939892 +0000 UTC m=+869.102710425" observedRunningTime="2025-10-07 08:10:48.152550677 +0000 UTC m=+873.112321230" watchObservedRunningTime="2025-10-07 08:10:48.154408992 +0000 UTC m=+873.114179535" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.156902 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" podStartSLOduration=6.456749402 podStartE2EDuration="26.156896197s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.444457697 +0000 UTC m=+849.404228240" lastFinishedPulling="2025-10-07 08:10:44.144604492 +0000 UTC m=+869.104375035" observedRunningTime="2025-10-07 08:10:48.124045097 +0000 UTC m=+873.083815670" watchObservedRunningTime="2025-10-07 08:10:48.156896197 +0000 UTC m=+873.116666740" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.198496 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" podStartSLOduration=7.023490854 podStartE2EDuration="25.198475451s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.995699491 +0000 UTC m=+850.955470034" lastFinishedPulling="2025-10-07 08:10:44.170684088 +0000 UTC m=+869.130454631" observedRunningTime="2025-10-07 08:10:48.194540733 +0000 UTC m=+873.154311276" watchObservedRunningTime="2025-10-07 08:10:48.198475451 +0000 UTC 
m=+873.158245994" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.374112 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" podStartSLOduration=5.45469405 podStartE2EDuration="25.374087315s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.703612955 +0000 UTC m=+850.663383498" lastFinishedPulling="2025-10-07 08:10:45.62300622 +0000 UTC m=+870.582776763" observedRunningTime="2025-10-07 08:10:48.36762507 +0000 UTC m=+873.327395633" watchObservedRunningTime="2025-10-07 08:10:48.374087315 +0000 UTC m=+873.333857858" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.393340 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" podStartSLOduration=6.968708376 podStartE2EDuration="26.393313055s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.746306196 +0000 UTC m=+849.706076739" lastFinishedPulling="2025-10-07 08:10:44.170910875 +0000 UTC m=+869.130681418" observedRunningTime="2025-10-07 08:10:48.386797028 +0000 UTC m=+873.346567591" watchObservedRunningTime="2025-10-07 08:10:48.393313055 +0000 UTC m=+873.353083598" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.423724 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" podStartSLOduration=5.65832885 podStartE2EDuration="25.423696141s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.703868563 +0000 UTC m=+850.663639106" lastFinishedPulling="2025-10-07 08:10:45.469235864 +0000 UTC m=+870.429006397" observedRunningTime="2025-10-07 08:10:48.418154804 +0000 UTC m=+873.377925357" watchObservedRunningTime="2025-10-07 08:10:48.423696141 +0000 UTC m=+873.383466684" Oct 07 08:10:48 crc kubenswrapper[4875]: I1007 08:10:48.436480 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" podStartSLOduration=6.246229462 podStartE2EDuration="25.436453295s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.952813972 +0000 UTC m=+849.912584515" lastFinishedPulling="2025-10-07 08:10:44.143037795 +0000 UTC m=+869.102808348" observedRunningTime="2025-10-07 08:10:48.434572498 +0000 UTC m=+873.394343061" watchObservedRunningTime="2025-10-07 08:10:48.436453295 +0000 UTC m=+873.396223838" Oct 07 08:10:49 crc kubenswrapper[4875]: I1007 08:10:49.009898 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerStarted","Data":"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70"} Oct 07 08:10:49 crc kubenswrapper[4875]: I1007 08:10:49.012549 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerStarted","Data":"6e355286f7f6a5b31f5ff33d33b5215288f60f892cdd7663d84d257e0332fff4"} Oct 07 08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.027837 4875 generic.go:334] "Generic (PLEG): container finished" podID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerID="6e355286f7f6a5b31f5ff33d33b5215288f60f892cdd7663d84d257e0332fff4" exitCode=0 Oct 07 
08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.027998 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerDied","Data":"6e355286f7f6a5b31f5ff33d33b5215288f60f892cdd7663d84d257e0332fff4"} Oct 07 08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.031152 4875 generic.go:334] "Generic (PLEG): container finished" podID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerID="05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70" exitCode=0 Oct 07 08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.031272 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerDied","Data":"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70"} Oct 07 08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.035849 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-54b4974c45-t97rc" Oct 07 08:10:50 crc kubenswrapper[4875]: I1007 08:10:50.038802 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j" Oct 07 08:10:51 crc kubenswrapper[4875]: I1007 08:10:51.041431 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerStarted","Data":"7a68d0707124191181c5bc74b5f11ad11c97b4525a278429ed92befcdf7eaae4"} Oct 07 08:10:51 crc kubenswrapper[4875]: I1007 08:10:51.043718 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerStarted","Data":"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0"} Oct 07 08:10:51 crc kubenswrapper[4875]: I1007 08:10:51.070197 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hvlwz" podStartSLOduration=20.532602012 podStartE2EDuration="23.070170231s" podCreationTimestamp="2025-10-07 08:10:28 +0000 UTC" firstStartedPulling="2025-10-07 08:10:47.977043615 +0000 UTC m=+872.936814158" lastFinishedPulling="2025-10-07 08:10:50.514611834 +0000 UTC m=+875.474382377" observedRunningTime="2025-10-07 08:10:51.063602724 +0000 UTC m=+876.023373277" watchObservedRunningTime="2025-10-07 08:10:51.070170231 +0000 UTC m=+876.029940774" Oct 07 08:10:51 crc kubenswrapper[4875]: I1007 08:10:51.096789 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rxtrt" podStartSLOduration=16.655384044 podStartE2EDuration="19.096765323s" podCreationTimestamp="2025-10-07 08:10:32 +0000 UTC" firstStartedPulling="2025-10-07 08:10:48.011145913 +0000 UTC m=+872.970916456" lastFinishedPulling="2025-10-07 08:10:50.452527192 +0000 UTC m=+875.412297735" observedRunningTime="2025-10-07 08:10:51.091635959 +0000 UTC m=+876.051406522" watchObservedRunningTime="2025-10-07 08:10:51.096765323 +0000 UTC m=+876.056535866" Oct 07 08:10:52 crc kubenswrapper[4875]: I1007 08:10:52.462930 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:52 crc kubenswrapper[4875]: I1007 08:10:52.463393 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.210167 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-58c4cd55f4-tslhs" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.226957 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7d4d4f8d-8chkp" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.243787 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-n9gbs" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.294195 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5dc44df7d5-zpbdj" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.591458 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-rxtrt" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="registry-server" probeResult="failure" output=< Oct 07 08:10:53 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 08:10:53 crc kubenswrapper[4875]: > Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.613981 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-649675d675-6vp52" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.774241 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-8d984cc4d-jcb62" Oct 07 08:10:53 crc kubenswrapper[4875]: I1007 08:10:53.886358 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7c7fc454ff-2s4ks" Oct 07 08:10:54 crc kubenswrapper[4875]: I1007 08:10:54.116778 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-gfr8f" Oct 07 08:10:54 crc kubenswrapper[4875]: I1007 08:10:54.257547 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6cbc6dd547-9njzw" Oct 07 08:10:55 crc kubenswrapper[4875]: I1007 08:10:55.414862 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t" Oct 07 08:10:58 crc kubenswrapper[4875]: I1007 08:10:58.844066 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:58 crc kubenswrapper[4875]: I1007 08:10:58.844469 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:10:58 crc kubenswrapper[4875]: I1007 08:10:58.887484 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:10:59.165346 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:00.891044 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:01.119100 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hvlwz" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="registry-server" containerID="cri-o://7a68d0707124191181c5bc74b5f11ad11c97b4525a278429ed92befcdf7eaae4" gracePeriod=2 Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:01.220758 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:01.221193 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.132645 4875 generic.go:334] "Generic (PLEG): container finished" podID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerID="7a68d0707124191181c5bc74b5f11ad11c97b4525a278429ed92befcdf7eaae4" exitCode=0 Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.132716 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerDied","Data":"7a68d0707124191181c5bc74b5f11ad11c97b4525a278429ed92befcdf7eaae4"} Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.531477 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.654469 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.793022 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.914803 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities\") pod \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.914949 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content\") pod \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.915009 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt84q\" (UniqueName: \"kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q\") pod \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\" (UID: \"b1ea5bc6-a184-4e5f-9033-80d78eb736a9\") " Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.915964 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities" (OuterVolumeSpecName: "utilities") pod "b1ea5bc6-a184-4e5f-9033-80d78eb736a9" (UID: "b1ea5bc6-a184-4e5f-9033-80d78eb736a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.922210 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q" (OuterVolumeSpecName: "kube-api-access-tt84q") pod "b1ea5bc6-a184-4e5f-9033-80d78eb736a9" (UID: "b1ea5bc6-a184-4e5f-9033-80d78eb736a9"). InnerVolumeSpecName "kube-api-access-tt84q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:02.927964 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1ea5bc6-a184-4e5f-9033-80d78eb736a9" (UID: "b1ea5bc6-a184-4e5f-9033-80d78eb736a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.017576 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.017649 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.017670 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt84q\" (UniqueName: \"kubernetes.io/projected/b1ea5bc6-a184-4e5f-9033-80d78eb736a9-kube-api-access-tt84q\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.150794 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvlwz" event={"ID":"b1ea5bc6-a184-4e5f-9033-80d78eb736a9","Type":"ContainerDied","Data":"f6109359d710263272e944cd162478a8aa69e2d690a2df652113a3b3164ae600"} Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.150916 4875 scope.go:117] "RemoveContainer" containerID="7a68d0707124191181c5bc74b5f11ad11c97b4525a278429ed92befcdf7eaae4" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.150873 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvlwz" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.205103 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.212320 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvlwz"] Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.666063 4875 scope.go:117] "RemoveContainer" containerID="6e355286f7f6a5b31f5ff33d33b5215288f60f892cdd7663d84d257e0332fff4" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.721025 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" path="/var/lib/kubelet/pods/b1ea5bc6-a184-4e5f-9033-80d78eb736a9/volumes" Oct 07 08:11:03 crc kubenswrapper[4875]: I1007 08:11:03.786685 4875 scope.go:117] "RemoveContainer" containerID="835ef2a6c276da855d64223aaba0636a2a1e02d14a039c5b5b2aad719d72ceea" Oct 07 08:11:04 crc kubenswrapper[4875]: I1007 08:11:04.171926 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" event={"ID":"4aab0800-b79f-42f6-8d27-ce34e631f086","Type":"ContainerStarted","Data":"c19dd317d9b2373361ff593ac6efd1a9467378041fb4b02715c9f2205a939870"} Oct 07 08:11:04 crc kubenswrapper[4875]: I1007 08:11:04.173201 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:11:04 crc kubenswrapper[4875]: I1007 08:11:04.202550 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" podStartSLOduration=3.1084466 podStartE2EDuration="41.202521673s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.69248599 +0000 UTC m=+850.652256533" lastFinishedPulling="2025-10-07 08:11:03.786561063 +0000 UTC 
m=+888.746331606" observedRunningTime="2025-10-07 08:11:04.194291025 +0000 UTC m=+889.154061578" watchObservedRunningTime="2025-10-07 08:11:04.202521673 +0000 UTC m=+889.162292216" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.187010 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" event={"ID":"ab712745-89ea-43bb-b2d7-7192d3691acf","Type":"ContainerStarted","Data":"0266f5e19b04d5340ebacfba223622e8246ec7f8275cbfe36555cd2e74d3d350"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.187546 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.190193 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" event={"ID":"45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8","Type":"ContainerStarted","Data":"9ba99bb94412463cf85d02e68c18c3b6217d0a66adfd59428df9685659d8b9b0"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.190535 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.192568 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" event={"ID":"9e09310b-4437-4e30-881f-ed2dd568aa16","Type":"ContainerStarted","Data":"c70e4523c0c5ee56f9fa053bda86b47e728c1d23303df7628fcf127738a879ec"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.192766 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.194486 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" event={"ID":"f5fb651c-9cca-4f7c-9136-37534358a8dd","Type":"ContainerStarted","Data":"0fa83eeb5b8cf32ed60ca3d5439d0ff0e141fb2374db5550d45b4da3c3d99f01"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.194900 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.197205 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" event={"ID":"0cce3a76-3617-40be-8d2b-b8f9184e6b61","Type":"ContainerStarted","Data":"4451e0b818a82c0609d02dfffad875493046972afaefd177285ae860a11ad629"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.197449 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.199423 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" event={"ID":"c57705e3-3b4e-4252-8c8d-0a21084ff5d8","Type":"ContainerStarted","Data":"dfb4ac848f4fd63286f5adeb227a47ae757f140436308ce96056337a01a4db3f"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.199623 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:11:05 crc 
kubenswrapper[4875]: I1007 08:11:05.201139 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" event={"ID":"52379554-390c-4cb2-97ae-0cb0596f36d1","Type":"ContainerStarted","Data":"1926735abcff14541a38a6cae9394535ba3978d7fc4f67255ddb1a88d2df368b"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.201510 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.203116 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" event={"ID":"d848105d-5f25-435d-bc92-fc6f9eac9749","Type":"ContainerStarted","Data":"284e73713ff4be98e07d197b2d3617e6ee97675c1e14d94ad574204031b4dd77"} Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.203516 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.207261 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" podStartSLOduration=3.688448406 podStartE2EDuration="42.207218291s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.310682001 +0000 UTC m=+850.270452544" lastFinishedPulling="2025-10-07 08:11:03.829451886 +0000 UTC m=+888.789222429" observedRunningTime="2025-10-07 08:11:05.205255281 +0000 UTC m=+890.165025834" watchObservedRunningTime="2025-10-07 08:11:05.207218291 +0000 UTC m=+890.166988834" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.227022 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" podStartSLOduration=3.89196632 podStartE2EDuration="42.226998707s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.692898032 +0000 UTC m=+850.652668575" lastFinishedPulling="2025-10-07 08:11:04.027930409 +0000 UTC m=+888.987700962" observedRunningTime="2025-10-07 08:11:05.224311486 +0000 UTC m=+890.184082049" watchObservedRunningTime="2025-10-07 08:11:05.226998707 +0000 UTC m=+890.186769250" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.244668 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" podStartSLOduration=3.556442034 podStartE2EDuration="42.244643658s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.298985438 +0000 UTC m=+850.258755981" lastFinishedPulling="2025-10-07 08:11:03.987187052 +0000 UTC m=+888.946957605" observedRunningTime="2025-10-07 08:11:05.241891686 +0000 UTC m=+890.201662229" watchObservedRunningTime="2025-10-07 08:11:05.244643658 +0000 UTC m=+890.204414211" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.269510 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" podStartSLOduration=4.28394915 podStartE2EDuration="43.269484588s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.827775562 +0000 UTC m=+849.787546105" lastFinishedPulling="2025-10-07 08:11:03.813311 +0000 UTC m=+888.773081543" 
observedRunningTime="2025-10-07 08:11:05.260712804 +0000 UTC m=+890.220483357" watchObservedRunningTime="2025-10-07 08:11:05.269484588 +0000 UTC m=+890.229255141" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.288768 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" podStartSLOduration=4.066765043 podStartE2EDuration="43.288736728s" podCreationTimestamp="2025-10-07 08:10:22 +0000 UTC" firstStartedPulling="2025-10-07 08:10:24.444019793 +0000 UTC m=+849.403790336" lastFinishedPulling="2025-10-07 08:11:03.665991438 +0000 UTC m=+888.625762021" observedRunningTime="2025-10-07 08:11:05.284446379 +0000 UTC m=+890.244216972" watchObservedRunningTime="2025-10-07 08:11:05.288736728 +0000 UTC m=+890.248507271" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.308395 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" podStartSLOduration=4.171060474 podStartE2EDuration="42.30837247s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.676718345 +0000 UTC m=+850.636488898" lastFinishedPulling="2025-10-07 08:11:03.814030351 +0000 UTC m=+888.773800894" observedRunningTime="2025-10-07 08:11:05.303068381 +0000 UTC m=+890.262838954" watchObservedRunningTime="2025-10-07 08:11:05.30837247 +0000 UTC m=+890.268143023" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.328484 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" podStartSLOduration=3.736335738 podStartE2EDuration="42.328455445s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.220333457 +0000 UTC m=+850.180104000" lastFinishedPulling="2025-10-07 08:11:03.812453164 +0000 UTC m=+888.772223707" observedRunningTime="2025-10-07 08:11:05.322040802 +0000 UTC m=+890.281811365" watchObservedRunningTime="2025-10-07 08:11:05.328455445 +0000 UTC m=+890.288225988" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.349741 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" podStartSLOduration=3.860510042 podStartE2EDuration="42.349720887s" podCreationTimestamp="2025-10-07 08:10:23 +0000 UTC" firstStartedPulling="2025-10-07 08:10:25.32296055 +0000 UTC m=+850.282731093" lastFinishedPulling="2025-10-07 08:11:03.812171355 +0000 UTC m=+888.771941938" observedRunningTime="2025-10-07 08:11:05.343606182 +0000 UTC m=+890.303376735" watchObservedRunningTime="2025-10-07 08:11:05.349720887 +0000 UTC m=+890.309491430" Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.690886 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:11:05 crc kubenswrapper[4875]: I1007 08:11:05.692667 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rxtrt" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="registry-server" containerID="cri-o://bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0" gracePeriod=2 Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.093398 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.214794 4875 generic.go:334] "Generic (PLEG): container finished" podID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerID="bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0" exitCode=0 Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.214877 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxtrt" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.214917 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerDied","Data":"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0"} Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.216062 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxtrt" event={"ID":"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676","Type":"ContainerDied","Data":"81ca85165d6f8af4712011669a050045fd78c88a2dc46a428d57e8ede0c4b5a0"} Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.216105 4875 scope.go:117] "RemoveContainer" containerID="bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.239204 4875 scope.go:117] "RemoveContainer" containerID="05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.260412 4875 scope.go:117] "RemoveContainer" containerID="bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.276730 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content\") pod \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.276876 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities\") pod \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.276929 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbgj2\" (UniqueName: \"kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2\") pod \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\" (UID: \"2e73a0eb-ebc7-46a8-bde5-8cd8febc5676\") " Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.279468 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities" (OuterVolumeSpecName: "utilities") pod "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" (UID: "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.282047 4875 scope.go:117] "RemoveContainer" containerID="bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0" Oct 07 08:11:06 crc kubenswrapper[4875]: E1007 08:11:06.282549 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0\": container with ID starting with bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0 not found: ID does not exist" containerID="bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.282593 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0"} err="failed to get container status \"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0\": rpc error: code = NotFound desc = could not find container \"bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0\": container with ID starting with bdf0e4911b26be2a9b7a9ae0a7b0f19213dce196973230a2204ff281ed85a1f0 not found: ID does not exist" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.282627 4875 scope.go:117] "RemoveContainer" containerID="05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70" Oct 07 08:11:06 crc kubenswrapper[4875]: E1007 08:11:06.282856 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70\": container with ID starting with 05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70 not found: ID does not exist" containerID="05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.282881 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70"} err="failed to get container status \"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70\": rpc error: code = NotFound desc = could not find container \"05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70\": container with ID starting with 05f2b36473eecb303f8a31ecc685d4df75db1b24f860417af4a0afd2b441ef70 not found: ID does not exist" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.282916 4875 scope.go:117] "RemoveContainer" containerID="bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91" Oct 07 08:11:06 crc kubenswrapper[4875]: E1007 08:11:06.283227 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91\": container with ID starting with bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91 not found: ID does not exist" containerID="bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.283318 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91"} err="failed to get container status \"bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91\": rpc error: code = NotFound desc = could not 
find container \"bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91\": container with ID starting with bb2c6bfc16a860a8fda09f6c7b9a654fbd9cd1ac379fa953af385bbc7ecadb91 not found: ID does not exist" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.283746 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2" (OuterVolumeSpecName: "kube-api-access-rbgj2") pod "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" (UID: "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676"). InnerVolumeSpecName "kube-api-access-rbgj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.333945 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" (UID: "2e73a0eb-ebc7-46a8-bde5-8cd8febc5676"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.381531 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.381564 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.381577 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbgj2\" (UniqueName: \"kubernetes.io/projected/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676-kube-api-access-rbgj2\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.567350 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:11:06 crc kubenswrapper[4875]: I1007 08:11:06.572536 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rxtrt"] Oct 07 08:11:07 crc kubenswrapper[4875]: I1007 08:11:07.709615 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" path="/var/lib/kubelet/pods/2e73a0eb-ebc7-46a8-bde5-8cd8febc5676/volumes" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.345362 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-76d5b87f47-g6cn6" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.392282 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-658588b8c9-l77jd" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.636982 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7b5ccf6d9c-lvqtd" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.637569 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6d8b6f9b9-dtxrv" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.661746 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/manila-operator-controller-manager-65d89cfd9f-zwhfp" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.803227 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7468f855d8-wr55g" Oct 07 08:11:13 crc kubenswrapper[4875]: I1007 08:11:13.971960 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-54689d9f88-8b99x" Oct 07 08:11:14 crc kubenswrapper[4875]: I1007 08:11:14.112354 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5d4d74dd89-z98x2" Oct 07 08:11:14 crc kubenswrapper[4875]: I1007 08:11:14.139355 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-l8gnr" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.557474 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558768 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="extract-content" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558788 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="extract-content" Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558831 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558841 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558852 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="extract-utilities" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558861 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="extract-utilities" Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558895 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558905 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558926 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="extract-utilities" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558933 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="extract-utilities" Oct 07 08:11:30 crc kubenswrapper[4875]: E1007 08:11:30.558949 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="extract-content" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.558956 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="extract-content" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 
08:11:30.559196 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e73a0eb-ebc7-46a8-bde5-8cd8febc5676" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.559209 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ea5bc6-a184-4e5f-9033-80d78eb736a9" containerName="registry-server" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.560213 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.564070 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.564358 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.568454 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-z4hjt" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.568518 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.579984 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.683086 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgf5t\" (UniqueName: \"kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.683322 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.784571 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.784690 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgf5t\" (UniqueName: \"kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.785862 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.823367 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.824637 4875 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.825110 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgf5t\" (UniqueName: \"kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t\") pod \"dnsmasq-dns-675f4bcbfc-zg72b\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.835583 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.839034 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.884470 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.988953 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.989018 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvx6v\" (UniqueName: \"kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:30 crc kubenswrapper[4875]: I1007 08:11:30.989099 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.090605 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.091139 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvx6v\" (UniqueName: \"kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.091242 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.091566 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.092538 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.118131 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvx6v\" (UniqueName: \"kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v\") pod \"dnsmasq-dns-78dd6ddcc-4kj9j\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.150073 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.222069 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.222161 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.391849 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.411370 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.424040 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" event={"ID":"c95ebd05-76be-47ee-ad07-c1e84d34a235","Type":"ContainerStarted","Data":"422247a19217e665002ad25da872b88b21fcca4307bece801a77f9e7580db2bd"} Oct 07 08:11:31 crc kubenswrapper[4875]: I1007 08:11:31.626252 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:32 crc kubenswrapper[4875]: I1007 08:11:32.444982 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" event={"ID":"1d0e537c-bf41-486e-89dc-b90686db28e7","Type":"ContainerStarted","Data":"a6b55e21a7648aa8a6cccda4fc47a4b665d6e315595f807184a874995c0696fa"} Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.349983 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.400191 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.401725 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.408391 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.548970 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6w99\" (UniqueName: \"kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.549110 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.549143 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.652906 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.653010 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6w99\" (UniqueName: \"kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.653081 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.654473 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.655391 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.702199 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6w99\" (UniqueName: 
\"kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99\") pod \"dnsmasq-dns-5ccc8479f9-sx8tp\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.739063 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.741708 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.754117 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.758744 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.769144 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.963286 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.963341 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:33 crc kubenswrapper[4875]: I1007 08:11:33.963377 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvnch\" (UniqueName: \"kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.066774 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvnch\" (UniqueName: \"kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.067363 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.067390 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.068395 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.069276 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.108889 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvnch\" (UniqueName: \"kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch\") pod \"dnsmasq-dns-57d769cc4f-qbh8c\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.227231 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.557325 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:11:34 crc kubenswrapper[4875]: W1007 08:11:34.563780 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12c7ef4c_d13d_4813_9e2f_37197f79207e.slice/crio-2d3664ae96c7e1ea6ebc14cd37af025e919e8bab6d24f3f7667cb0c9182a3ef4 WatchSource:0}: Error finding container 2d3664ae96c7e1ea6ebc14cd37af025e919e8bab6d24f3f7667cb0c9182a3ef4: Status 404 returned error can't find the container with id 2d3664ae96c7e1ea6ebc14cd37af025e919e8bab6d24f3f7667cb0c9182a3ef4 Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.571301 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.573419 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.577620 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.577809 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.578044 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.578722 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.578866 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.579037 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-bkgpg" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.579325 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.605731 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684091 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684214 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684271 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684294 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684312 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684330 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-rzzf5\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684349 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684375 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684404 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684423 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.684461 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786242 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786307 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786332 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786389 4875 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786637 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786679 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786727 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786747 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786765 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786792 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.786810 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzzf5\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.787125 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.787452 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") 
device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.787729 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.788562 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.790080 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.791421 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.794467 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.795408 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.795716 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.809982 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.821848 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzzf5\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.825341 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.835520 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.910407 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.962181 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.964119 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.968046 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-d8zjz" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.968270 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.969320 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.969416 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.969340 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.969633 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 08:11:34 crc kubenswrapper[4875]: I1007 08:11:34.969708 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.001076 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092368 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092779 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092814 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092851 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092887 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb85f\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092908 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092940 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092968 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.092999 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.093061 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.093077 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194708 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194753 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194783 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194814 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194834 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194860 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194895 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb85f\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194914 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194939 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194959 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.194982 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.195423 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.196166 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.196470 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.197192 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.197893 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.200002 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.201063 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.202122 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.204717 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.201616 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.222321 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb85f\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.224421 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.267635 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.301188 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.493532 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerStarted","Data":"a5a5892ee262ccef7372f29a25607fc3aef0237513b20cff031c3eea6883ed83"} Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.495471 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" event={"ID":"13763859-5bdc-495b-b344-b2b8e0b7fd1e","Type":"ContainerStarted","Data":"44af87e8f257af7c4a676b6255f7dc5adb2c7be90b4a47345d284f17c9bb101d"} Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.501113 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" event={"ID":"12c7ef4c-d13d-4813-9e2f-37197f79207e","Type":"ContainerStarted","Data":"2d3664ae96c7e1ea6ebc14cd37af025e919e8bab6d24f3f7667cb0c9182a3ef4"} Oct 07 08:11:35 crc kubenswrapper[4875]: I1007 08:11:35.927134 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:11:35 crc kubenswrapper[4875]: W1007 08:11:35.951325 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice/crio-d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a WatchSource:0}: Error finding container d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a: Status 404 returned error can't find the container with id d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a Oct 07 08:11:36 crc kubenswrapper[4875]: I1007 08:11:36.513774 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerStarted","Data":"d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a"} Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.433855 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.436736 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.443647 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.443891 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.443643 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.444036 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-5djfr" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.444060 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.446737 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.453688 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.470371 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.476232 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.489889 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.490438 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.492155 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-7wg46" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.493632 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.523918 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555245 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555343 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-secrets\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555406 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/027e3c4c-1861-4933-9d30-636575099b5c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " 
pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555436 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555941 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt2rt\" (UniqueName: \"kubernetes.io/projected/027e3c4c-1861-4933-9d30-636575099b5c-kube-api-access-dt2rt\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.555990 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556022 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556142 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556230 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556292 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-config-data-default\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556323 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556365 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") 
" pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556398 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.556864 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.557068 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.557188 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crdnz\" (UniqueName: \"kubernetes.io/projected/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kube-api-access-crdnz\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.557220 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-kolla-config\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.557249 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659117 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659584 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-config-data-default\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659621 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 
08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659646 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659678 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659713 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659767 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659823 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crdnz\" (UniqueName: \"kubernetes.io/projected/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kube-api-access-crdnz\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659849 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-kolla-config\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659891 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659924 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659949 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-secrets\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659967 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/027e3c4c-1861-4933-9d30-636575099b5c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.659986 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.660006 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt2rt\" (UniqueName: \"kubernetes.io/projected/027e3c4c-1861-4933-9d30-636575099b5c-kube-api-access-dt2rt\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.660023 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.660047 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.660064 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.661020 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/027e3c4c-1861-4933-9d30-636575099b5c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.661732 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-kolla-config\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.662432 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.662851 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") device mount path \"/mnt/openstack/pv02\"" 
pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.663025 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.663349 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.663941 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.664225 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dad2a19-0fbf-46b6-b534-7f2712b644d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.664732 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-config-data-default\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.665983 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/027e3c4c-1861-4933-9d30-636575099b5c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.671778 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.682414 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-secrets\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.683724 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.685664 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt2rt\" 
(UniqueName: \"kubernetes.io/projected/027e3c4c-1861-4933-9d30-636575099b5c-kube-api-access-dt2rt\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.685910 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.686453 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/8dad2a19-0fbf-46b6-b534-7f2712b644d7-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.691465 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crdnz\" (UniqueName: \"kubernetes.io/projected/8dad2a19-0fbf-46b6-b534-7f2712b644d7-kube-api-access-crdnz\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.693584 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/027e3c4c-1861-4933-9d30-636575099b5c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.701898 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"027e3c4c-1861-4933-9d30-636575099b5c\") " pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.722834 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8dad2a19-0fbf-46b6-b534-7f2712b644d7\") " pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.812116 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.829321 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.961044 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.962564 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.966986 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.967192 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.967317 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-f2gtn" Oct 07 08:11:37 crc kubenswrapper[4875]: I1007 08:11:37.983750 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.069040 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.069093 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.069155 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm879\" (UniqueName: \"kubernetes.io/projected/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kube-api-access-bm879\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.069220 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-config-data\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.069246 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kolla-config\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.174246 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-config-data\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.174321 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kolla-config\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.174398 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.174425 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.174485 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm879\" (UniqueName: \"kubernetes.io/projected/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kube-api-access-bm879\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.175646 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-config-data\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.175681 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kolla-config\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.189954 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.196385 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.196653 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm879\" (UniqueName: \"kubernetes.io/projected/4cc2ad0d-e745-4a93-a5fb-d9f891bc3474-kube-api-access-bm879\") pod \"memcached-0\" (UID: \"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474\") " pod="openstack/memcached-0" Oct 07 08:11:38 crc kubenswrapper[4875]: I1007 08:11:38.347816 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 07 08:11:39 crc kubenswrapper[4875]: I1007 08:11:39.979595 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:11:39 crc kubenswrapper[4875]: I1007 08:11:39.981861 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:11:39 crc kubenswrapper[4875]: I1007 08:11:39.984339 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-cztgc" Oct 07 08:11:40 crc kubenswrapper[4875]: I1007 08:11:39.998599 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:11:40 crc kubenswrapper[4875]: I1007 08:11:40.011560 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkmd4\" (UniqueName: \"kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4\") pod \"kube-state-metrics-0\" (UID: \"c187d3cc-62d2-43b8-9616-03670de368da\") " pod="openstack/kube-state-metrics-0" Oct 07 08:11:40 crc kubenswrapper[4875]: I1007 08:11:40.128004 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkmd4\" (UniqueName: \"kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4\") pod \"kube-state-metrics-0\" (UID: \"c187d3cc-62d2-43b8-9616-03670de368da\") " pod="openstack/kube-state-metrics-0" Oct 07 08:11:40 crc kubenswrapper[4875]: I1007 08:11:40.171180 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkmd4\" (UniqueName: \"kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4\") pod \"kube-state-metrics-0\" (UID: \"c187d3cc-62d2-43b8-9616-03670de368da\") " pod="openstack/kube-state-metrics-0" Oct 07 08:11:40 crc kubenswrapper[4875]: I1007 08:11:40.308721 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.363915 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-npwcn"] Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.365586 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.367382 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.368083 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-mgx9r" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.371666 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.384382 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn"] Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.415063 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-vwbf5"] Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.417257 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.444258 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-vwbf5"] Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.502595 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-combined-ca-bundle\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.502654 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.502923 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run-ovn\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503076 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318e0040-83b4-4fa3-95ff-768b9eb422e7-scripts\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503211 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-ovn-controller-tls-certs\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503344 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-lib\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503392 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-log\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503419 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-log-ovn\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503711 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-run\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503747 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-etc-ovs\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503761 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jscrp\" (UniqueName: \"kubernetes.io/projected/97a62f1e-e3e0-4592-82a8-2524ba6df291-kube-api-access-jscrp\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503832 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d499p\" (UniqueName: \"kubernetes.io/projected/318e0040-83b4-4fa3-95ff-768b9eb422e7-kube-api-access-d499p\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.503851 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97a62f1e-e3e0-4592-82a8-2524ba6df291-scripts\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605096 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d499p\" (UniqueName: \"kubernetes.io/projected/318e0040-83b4-4fa3-95ff-768b9eb422e7-kube-api-access-d499p\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605166 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97a62f1e-e3e0-4592-82a8-2524ba6df291-scripts\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605227 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-combined-ca-bundle\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605258 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605290 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run-ovn\") pod \"ovn-controller-npwcn\" 
(UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605326 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318e0040-83b4-4fa3-95ff-768b9eb422e7-scripts\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605348 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-ovn-controller-tls-certs\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605377 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-lib\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605398 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-log\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605416 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-log-ovn\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605509 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-run\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605535 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-etc-ovs\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.605554 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jscrp\" (UniqueName: \"kubernetes.io/projected/97a62f1e-e3e0-4592-82a8-2524ba6df291-kube-api-access-jscrp\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606102 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run-ovn\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606180 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-lib\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606319 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-log\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606350 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-var-run\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606388 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/318e0040-83b4-4fa3-95ff-768b9eb422e7-etc-ovs\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606377 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-run\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.606415 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97a62f1e-e3e0-4592-82a8-2524ba6df291-var-log-ovn\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.607683 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97a62f1e-e3e0-4592-82a8-2524ba6df291-scripts\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.608254 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318e0040-83b4-4fa3-95ff-768b9eb422e7-scripts\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.612748 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-combined-ca-bundle\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.626932 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a62f1e-e3e0-4592-82a8-2524ba6df291-ovn-controller-tls-certs\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.628840 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jscrp\" (UniqueName: \"kubernetes.io/projected/97a62f1e-e3e0-4592-82a8-2524ba6df291-kube-api-access-jscrp\") pod \"ovn-controller-npwcn\" (UID: \"97a62f1e-e3e0-4592-82a8-2524ba6df291\") " pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.630432 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d499p\" (UniqueName: \"kubernetes.io/projected/318e0040-83b4-4fa3-95ff-768b9eb422e7-kube-api-access-d499p\") pod \"ovn-controller-ovs-vwbf5\" (UID: \"318e0040-83b4-4fa3-95ff-768b9eb422e7\") " pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.688815 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn" Oct 07 08:11:43 crc kubenswrapper[4875]: I1007 08:11:43.738783 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.288527 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.290731 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.295495 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.295781 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.295949 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-xq9rx" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.295946 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.296047 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.304987 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424192 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldp2f\" (UniqueName: \"kubernetes.io/projected/021914db-05c8-4ae9-a24e-dee6bfabff00-kube-api-access-ldp2f\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424265 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424296 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " 
pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424389 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424429 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424462 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-config\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424494 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.424521 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526563 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526654 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526682 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-config\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526717 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526740 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526776 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldp2f\" (UniqueName: \"kubernetes.io/projected/021914db-05c8-4ae9-a24e-dee6bfabff00-kube-api-access-ldp2f\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526812 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.526840 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.529039 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.529683 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-config\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.530000 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/021914db-05c8-4ae9-a24e-dee6bfabff00-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.530351 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.546902 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.557105 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.559785 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldp2f\" (UniqueName: \"kubernetes.io/projected/021914db-05c8-4ae9-a24e-dee6bfabff00-kube-api-access-ldp2f\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.571372 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/021914db-05c8-4ae9-a24e-dee6bfabff00-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.586221 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"021914db-05c8-4ae9-a24e-dee6bfabff00\") " pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:44 crc kubenswrapper[4875]: I1007 08:11:44.646248 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.093739 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.095792 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.109502 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.110511 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.113796 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.114069 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-nc6xd" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.114245 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197169 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197262 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197293 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197342 4875 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197368 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-config\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197392 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxkbw\" (UniqueName: \"kubernetes.io/projected/e845f7e2-55f1-445d-8155-8a92bc2ee519-kube-api-access-mxkbw\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197455 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.197484 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299564 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299627 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-config\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299662 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxkbw\" (UniqueName: \"kubernetes.io/projected/e845f7e2-55f1-445d-8155-8a92bc2ee519-kube-api-access-mxkbw\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299741 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299776 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299855 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299922 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.299949 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.300252 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.300331 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.300723 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-config\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.301151 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845f7e2-55f1-445d-8155-8a92bc2ee519-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.309192 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.310062 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.319403 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845f7e2-55f1-445d-8155-8a92bc2ee519-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.357126 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.359741 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxkbw\" (UniqueName: \"kubernetes.io/projected/e845f7e2-55f1-445d-8155-8a92bc2ee519-kube-api-access-mxkbw\") pod \"ovsdbserver-nb-0\" (UID: \"e845f7e2-55f1-445d-8155-8a92bc2ee519\") " pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:47 crc kubenswrapper[4875]: I1007 08:11:47.418610 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.601119 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.601959 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rzzf5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(169a8fe1-831d-46f5-b939-e1507c89453e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.603250 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.603513 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.603633 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jb85f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(06c908ee-f087-4e43-904f-5cc1e01a2464): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.605803 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.725153 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" Oct 07 08:11:52 crc kubenswrapper[4875]: E1007 08:11:52.725965 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.490444 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.490681 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q6w99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-sx8tp_openstack(12c7ef4c-d13d-4813-9e2f-37197f79207e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.491892 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.498210 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.498426 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wgf5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-zg72b_openstack(c95ebd05-76be-47ee-ad07-c1e84d34a235): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.500379 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" podUID="c95ebd05-76be-47ee-ad07-c1e84d34a235" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.544171 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.545091 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wvx6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-4kj9j_openstack(1d0e537c-bf41-486e-89dc-b90686db28e7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.546917 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" podUID="1d0e537c-bf41-486e-89dc-b90686db28e7" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.549979 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.550068 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bvnch,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-qbh8c_openstack(13763859-5bdc-495b-b344-b2b8e0b7fd1e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.551945 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.746502 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" Oct 07 08:11:53 crc kubenswrapper[4875]: E1007 08:11:53.756760 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.111010 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.227671 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: W1007 08:11:54.232769 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod021914db_05c8_4ae9_a24e_dee6bfabff00.slice/crio-4003fad972f3135f50c1c0a3e0902393e4c6390181141da056c86150bbdc5b3d WatchSource:0}: Error finding container 4003fad972f3135f50c1c0a3e0902393e4c6390181141da056c86150bbdc5b3d: Status 404 returned error can't find the container with id 4003fad972f3135f50c1c0a3e0902393e4c6390181141da056c86150bbdc5b3d Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.267000 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.275415 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.302161 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: W1007 08:11:54.303754 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc187d3cc_62d2_43b8_9616_03670de368da.slice/crio-8d64567000b1e09fec2ee484018c2ec91e8a899acc0a59ab09e9594e3bea66b8 WatchSource:0}: Error finding container 8d64567000b1e09fec2ee484018c2ec91e8a899acc0a59ab09e9594e3bea66b8: Status 404 returned error can't find the container with id 8d64567000b1e09fec2ee484018c2ec91e8a899acc0a59ab09e9594e3bea66b8 Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.352386 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.362414 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.365196 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgf5t\" (UniqueName: \"kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t\") pod \"c95ebd05-76be-47ee-ad07-c1e84d34a235\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.365251 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config\") pod \"c95ebd05-76be-47ee-ad07-c1e84d34a235\" (UID: \"c95ebd05-76be-47ee-ad07-c1e84d34a235\") " Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.365808 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config" (OuterVolumeSpecName: "config") pod "c95ebd05-76be-47ee-ad07-c1e84d34a235" (UID: "c95ebd05-76be-47ee-ad07-c1e84d34a235"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.386967 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t" (OuterVolumeSpecName: "kube-api-access-wgf5t") pod "c95ebd05-76be-47ee-ad07-c1e84d34a235" (UID: "c95ebd05-76be-47ee-ad07-c1e84d34a235"). InnerVolumeSpecName "kube-api-access-wgf5t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.393786 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: W1007 08:11:54.400344 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cc2ad0d_e745_4a93_a5fb_d9f891bc3474.slice/crio-274deebffa34f9b4bf4e0287a24cd5b2d09ec0955c7cb21e0754af401d153d7d WatchSource:0}: Error finding container 274deebffa34f9b4bf4e0287a24cd5b2d09ec0955c7cb21e0754af401d153d7d: Status 404 returned error can't find the container with id 274deebffa34f9b4bf4e0287a24cd5b2d09ec0955c7cb21e0754af401d153d7d Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.456393 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 08:11:54 crc kubenswrapper[4875]: W1007 08:11:54.462848 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode845f7e2_55f1_445d_8155_8a92bc2ee519.slice/crio-258126e0ca432ee3cc058e5e013b769fb8ab007ad9b3f2f7fcfdf8cfb0745243 WatchSource:0}: Error finding container 258126e0ca432ee3cc058e5e013b769fb8ab007ad9b3f2f7fcfdf8cfb0745243: Status 404 returned error can't find the container with id 258126e0ca432ee3cc058e5e013b769fb8ab007ad9b3f2f7fcfdf8cfb0745243 Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.466032 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc\") pod \"1d0e537c-bf41-486e-89dc-b90686db28e7\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.466129 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvx6v\" (UniqueName: \"kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v\") pod \"1d0e537c-bf41-486e-89dc-b90686db28e7\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.466185 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config\") pod \"1d0e537c-bf41-486e-89dc-b90686db28e7\" (UID: \"1d0e537c-bf41-486e-89dc-b90686db28e7\") " Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.466596 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgf5t\" (UniqueName: \"kubernetes.io/projected/c95ebd05-76be-47ee-ad07-c1e84d34a235-kube-api-access-wgf5t\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.466609 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95ebd05-76be-47ee-ad07-c1e84d34a235-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.467106 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config" (OuterVolumeSpecName: "config") pod "1d0e537c-bf41-486e-89dc-b90686db28e7" (UID: "1d0e537c-bf41-486e-89dc-b90686db28e7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.467482 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1d0e537c-bf41-486e-89dc-b90686db28e7" (UID: "1d0e537c-bf41-486e-89dc-b90686db28e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.472286 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v" (OuterVolumeSpecName: "kube-api-access-wvx6v") pod "1d0e537c-bf41-486e-89dc-b90686db28e7" (UID: "1d0e537c-bf41-486e-89dc-b90686db28e7"). InnerVolumeSpecName "kube-api-access-wvx6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.560980 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-vwbf5"] Oct 07 08:11:54 crc kubenswrapper[4875]: W1007 08:11:54.564526 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod318e0040_83b4_4fa3_95ff_768b9eb422e7.slice/crio-8ec793532fba84f1e0e28afe70a1a609d137b65984cc5e1bcb9bf81a32e5c574 WatchSource:0}: Error finding container 8ec793532fba84f1e0e28afe70a1a609d137b65984cc5e1bcb9bf81a32e5c574: Status 404 returned error can't find the container with id 8ec793532fba84f1e0e28afe70a1a609d137b65984cc5e1bcb9bf81a32e5c574 Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.567895 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.567916 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvx6v\" (UniqueName: \"kubernetes.io/projected/1d0e537c-bf41-486e-89dc-b90686db28e7-kube-api-access-wvx6v\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.567926 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0e537c-bf41-486e-89dc-b90686db28e7-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.777075 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c187d3cc-62d2-43b8-9616-03670de368da","Type":"ContainerStarted","Data":"8d64567000b1e09fec2ee484018c2ec91e8a899acc0a59ab09e9594e3bea66b8"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.784612 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e845f7e2-55f1-445d-8155-8a92bc2ee519","Type":"ContainerStarted","Data":"258126e0ca432ee3cc058e5e013b769fb8ab007ad9b3f2f7fcfdf8cfb0745243"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.786712 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"027e3c4c-1861-4933-9d30-636575099b5c","Type":"ContainerStarted","Data":"aaef33a516f971288c05a712b139475d217a4716c96cb9b21b6ffb8b71ae5707"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.789116 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"8dad2a19-0fbf-46b6-b534-7f2712b644d7","Type":"ContainerStarted","Data":"1310b6cfe5a8c9ea7d3090d8496ca0ff524ff33a34ab84d165bfff7ef8b80001"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.789932 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" event={"ID":"c95ebd05-76be-47ee-ad07-c1e84d34a235","Type":"ContainerDied","Data":"422247a19217e665002ad25da872b88b21fcca4307bece801a77f9e7580db2bd"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.790022 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zg72b" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.795305 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" event={"ID":"1d0e537c-bf41-486e-89dc-b90686db28e7","Type":"ContainerDied","Data":"a6b55e21a7648aa8a6cccda4fc47a4b665d6e315595f807184a874995c0696fa"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.795415 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4kj9j" Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.800195 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474","Type":"ContainerStarted","Data":"274deebffa34f9b4bf4e0287a24cd5b2d09ec0955c7cb21e0754af401d153d7d"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.801525 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vwbf5" event={"ID":"318e0040-83b4-4fa3-95ff-768b9eb422e7","Type":"ContainerStarted","Data":"8ec793532fba84f1e0e28afe70a1a609d137b65984cc5e1bcb9bf81a32e5c574"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.802790 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn" event={"ID":"97a62f1e-e3e0-4592-82a8-2524ba6df291","Type":"ContainerStarted","Data":"5aa872e7e85e7149ac326588fb184d344d3f919785c22ade1acf5fa4bc16a170"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.803867 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"021914db-05c8-4ae9-a24e-dee6bfabff00","Type":"ContainerStarted","Data":"4003fad972f3135f50c1c0a3e0902393e4c6390181141da056c86150bbdc5b3d"} Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.870031 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.878594 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zg72b"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.887837 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:54 crc kubenswrapper[4875]: I1007 08:11:54.893222 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4kj9j"] Oct 07 08:11:55 crc kubenswrapper[4875]: I1007 08:11:55.714379 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d0e537c-bf41-486e-89dc-b90686db28e7" path="/var/lib/kubelet/pods/1d0e537c-bf41-486e-89dc-b90686db28e7/volumes" Oct 07 08:11:55 crc kubenswrapper[4875]: I1007 08:11:55.714784 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95ebd05-76be-47ee-ad07-c1e84d34a235" path="/var/lib/kubelet/pods/c95ebd05-76be-47ee-ad07-c1e84d34a235/volumes" Oct 07 08:12:01 crc 
kubenswrapper[4875]: I1007 08:12:01.222260 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.223358 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.223441 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.224427 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.224494 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4" gracePeriod=600 Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.878228 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"4cc2ad0d-e745-4a93-a5fb-d9f891bc3474","Type":"ContainerStarted","Data":"03d614cfdedd3694085b98092a9a8c87de8549d4a2848f57bdfc4828432f1f28"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.880066 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.884742 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"021914db-05c8-4ae9-a24e-dee6bfabff00","Type":"ContainerStarted","Data":"77bbfb4a0f18f6a9e335810d6dcbd36b86746eabb559cbbbfdef60fd96332c44"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.888596 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4" exitCode=0 Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.888715 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.888799 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.888834 4875 scope.go:117] "RemoveContainer" 
containerID="fd222e466d65c284b99075ccaec23b4472fee8035bb0e466c3b29064fa8b6524" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.891510 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e845f7e2-55f1-445d-8155-8a92bc2ee519","Type":"ContainerStarted","Data":"b957184b895bc228a700bf1ee19175e6caf4d4395a7ab774f792dc58f340c1a2"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.893627 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"027e3c4c-1861-4933-9d30-636575099b5c","Type":"ContainerStarted","Data":"254238eb10334380e1f480c94aa9efa2e7e7ca0296f5939c3b2ad6290f35e3ef"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.895831 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8dad2a19-0fbf-46b6-b534-7f2712b644d7","Type":"ContainerStarted","Data":"d99ed134e73545077a43b6a053ede8e0a76c1f8ac30b9113d3b0a872726fca6b"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.899748 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c187d3cc-62d2-43b8-9616-03670de368da","Type":"ContainerStarted","Data":"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.900819 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.907069 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vwbf5" event={"ID":"318e0040-83b4-4fa3-95ff-768b9eb422e7","Type":"ContainerStarted","Data":"806e2f7f137c14b5ac671c45c38399eda1a30a87a7a89e21ae5367e93d649282"} Oct 07 08:12:01 crc kubenswrapper[4875]: I1007 08:12:01.912817 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=17.946822867 podStartE2EDuration="24.912806438s" podCreationTimestamp="2025-10-07 08:11:37 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.40313802 +0000 UTC m=+939.362908563" lastFinishedPulling="2025-10-07 08:12:01.369121591 +0000 UTC m=+946.328892134" observedRunningTime="2025-10-07 08:12:01.908281623 +0000 UTC m=+946.868052166" watchObservedRunningTime="2025-10-07 08:12:01.912806438 +0000 UTC m=+946.872576981" Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.030291 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=15.948174841 podStartE2EDuration="23.030260016s" podCreationTimestamp="2025-10-07 08:11:39 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.308583815 +0000 UTC m=+939.268354358" lastFinishedPulling="2025-10-07 08:12:01.39066899 +0000 UTC m=+946.350439533" observedRunningTime="2025-10-07 08:12:02.026573728 +0000 UTC m=+946.986344271" watchObservedRunningTime="2025-10-07 08:12:02.030260016 +0000 UTC m=+946.990030559" Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.922976 4875 generic.go:334] "Generic (PLEG): container finished" podID="318e0040-83b4-4fa3-95ff-768b9eb422e7" containerID="806e2f7f137c14b5ac671c45c38399eda1a30a87a7a89e21ae5367e93d649282" exitCode=0 Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.923036 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vwbf5" 
event={"ID":"318e0040-83b4-4fa3-95ff-768b9eb422e7","Type":"ContainerDied","Data":"806e2f7f137c14b5ac671c45c38399eda1a30a87a7a89e21ae5367e93d649282"} Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.926952 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn" event={"ID":"97a62f1e-e3e0-4592-82a8-2524ba6df291","Type":"ContainerStarted","Data":"15f149e1ccd2f0b465e52dd80ad2c0522d36a7513cbc198356e8999090947a58"} Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.926998 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-npwcn" Oct 07 08:12:02 crc kubenswrapper[4875]: I1007 08:12:02.968393 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-npwcn" podStartSLOduration=12.914084389 podStartE2EDuration="19.968370684s" podCreationTimestamp="2025-10-07 08:11:43 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.367085317 +0000 UTC m=+939.326855860" lastFinishedPulling="2025-10-07 08:12:01.421371612 +0000 UTC m=+946.381142155" observedRunningTime="2025-10-07 08:12:02.96791018 +0000 UTC m=+947.927680743" watchObservedRunningTime="2025-10-07 08:12:02.968370684 +0000 UTC m=+947.928141227" Oct 07 08:12:03 crc kubenswrapper[4875]: I1007 08:12:03.938530 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vwbf5" event={"ID":"318e0040-83b4-4fa3-95ff-768b9eb422e7","Type":"ContainerStarted","Data":"c78c59024c02fb9d0d5312fe9fc7ee9dc10dc4b579639ba7228b487958f7e5e7"} Oct 07 08:12:03 crc kubenswrapper[4875]: I1007 08:12:03.939506 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vwbf5" event={"ID":"318e0040-83b4-4fa3-95ff-768b9eb422e7","Type":"ContainerStarted","Data":"b1eb0deedd6b88a19663da281cf708fea26c9a5482e4af4c47f36556a07133a7"} Oct 07 08:12:03 crc kubenswrapper[4875]: I1007 08:12:03.995845 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-vwbf5" podStartSLOduration=14.198653453 podStartE2EDuration="20.995816541s" podCreationTimestamp="2025-10-07 08:11:43 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.566919502 +0000 UTC m=+939.526690045" lastFinishedPulling="2025-10-07 08:12:01.36408259 +0000 UTC m=+946.323853133" observedRunningTime="2025-10-07 08:12:03.988639711 +0000 UTC m=+948.948410264" watchObservedRunningTime="2025-10-07 08:12:03.995816541 +0000 UTC m=+948.955587094" Oct 07 08:12:04 crc kubenswrapper[4875]: I1007 08:12:04.959608 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:12:04 crc kubenswrapper[4875]: I1007 08:12:04.960172 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.975006 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"021914db-05c8-4ae9-a24e-dee6bfabff00","Type":"ContainerStarted","Data":"971fc603d441a7973ed93805ecc986e26a349e7c3f14e7b340af0b25a8a531db"} Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.978540 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e845f7e2-55f1-445d-8155-8a92bc2ee519","Type":"ContainerStarted","Data":"0343607d6ca993ae7cdf2c29ed18e907f75d80c408ba3a7dec107331bf29437f"} Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.981936 4875 generic.go:334] "Generic (PLEG): container finished" 
podID="027e3c4c-1861-4933-9d30-636575099b5c" containerID="254238eb10334380e1f480c94aa9efa2e7e7ca0296f5939c3b2ad6290f35e3ef" exitCode=0 Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.982025 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"027e3c4c-1861-4933-9d30-636575099b5c","Type":"ContainerDied","Data":"254238eb10334380e1f480c94aa9efa2e7e7ca0296f5939c3b2ad6290f35e3ef"} Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.987936 4875 generic.go:334] "Generic (PLEG): container finished" podID="8dad2a19-0fbf-46b6-b534-7f2712b644d7" containerID="d99ed134e73545077a43b6a053ede8e0a76c1f8ac30b9113d3b0a872726fca6b" exitCode=0 Oct 07 08:12:05 crc kubenswrapper[4875]: I1007 08:12:05.987995 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8dad2a19-0fbf-46b6-b534-7f2712b644d7","Type":"ContainerDied","Data":"d99ed134e73545077a43b6a053ede8e0a76c1f8ac30b9113d3b0a872726fca6b"} Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.010929 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=12.321762765999999 podStartE2EDuration="23.010862849s" podCreationTimestamp="2025-10-07 08:11:43 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.236687175 +0000 UTC m=+939.196457718" lastFinishedPulling="2025-10-07 08:12:04.925787258 +0000 UTC m=+949.885557801" observedRunningTime="2025-10-07 08:12:05.9999546 +0000 UTC m=+950.959725193" watchObservedRunningTime="2025-10-07 08:12:06.010862849 +0000 UTC m=+950.970633422" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.079988 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.601188726 podStartE2EDuration="20.0799604s" podCreationTimestamp="2025-10-07 08:11:46 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.465264718 +0000 UTC m=+939.425035261" lastFinishedPulling="2025-10-07 08:12:04.944036392 +0000 UTC m=+949.903806935" observedRunningTime="2025-10-07 08:12:06.068120121 +0000 UTC m=+951.027890684" watchObservedRunningTime="2025-10-07 08:12:06.0799604 +0000 UTC m=+951.039730943" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.825104 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-6rsbk"] Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.826613 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.830507 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.840941 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6rsbk"] Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.946821 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovs-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.946864 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovn-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.946973 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tb2r\" (UniqueName: \"kubernetes.io/projected/e984b1d0-f011-46a5-8339-966f44e3c603-kube-api-access-8tb2r\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.947034 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-combined-ca-bundle\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.947066 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e984b1d0-f011-46a5-8339-966f44e3c603-config\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.947090 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:06 crc kubenswrapper[4875]: I1007 08:12:06.975557 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.008172 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"027e3c4c-1861-4933-9d30-636575099b5c","Type":"ContainerStarted","Data":"aebb88db97fac1a1005447db579fbe272c4229ce0f6b15fa8563c881c9b457ae"} Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.013045 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"8dad2a19-0fbf-46b6-b534-7f2712b644d7","Type":"ContainerStarted","Data":"71c0d3c4043f5a3bd8765ee22041c1a680e0c94cc9ed3537fd19151fe270181a"} Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.019614 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.021056 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.034710 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.046826 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=24.036227591 podStartE2EDuration="31.046809828s" podCreationTimestamp="2025-10-07 08:11:36 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.38250359 +0000 UTC m=+939.342274133" lastFinishedPulling="2025-10-07 08:12:01.393085837 +0000 UTC m=+946.352856370" observedRunningTime="2025-10-07 08:12:07.04565396 +0000 UTC m=+952.005424503" watchObservedRunningTime="2025-10-07 08:12:07.046809828 +0000 UTC m=+952.006580371" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.048697 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tb2r\" (UniqueName: \"kubernetes.io/projected/e984b1d0-f011-46a5-8339-966f44e3c603-kube-api-access-8tb2r\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.048782 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-combined-ca-bundle\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.048824 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e984b1d0-f011-46a5-8339-966f44e3c603-config\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.050695 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e984b1d0-f011-46a5-8339-966f44e3c603-config\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.063277 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.063488 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovs-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " 
pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.063523 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovn-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.064783 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovs-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.064978 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e984b1d0-f011-46a5-8339-966f44e3c603-ovn-rundir\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.066302 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-combined-ca-bundle\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.069203 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.069336 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e984b1d0-f011-46a5-8339-966f44e3c603-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.090686 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tb2r\" (UniqueName: \"kubernetes.io/projected/e984b1d0-f011-46a5-8339-966f44e3c603-kube-api-access-8tb2r\") pod \"ovn-controller-metrics-6rsbk\" (UID: \"e984b1d0-f011-46a5-8339-966f44e3c603\") " pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.117621 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=23.884752834 podStartE2EDuration="31.117595203s" podCreationTimestamp="2025-10-07 08:11:36 +0000 UTC" firstStartedPulling="2025-10-07 08:11:54.131107016 +0000 UTC m=+939.090877559" lastFinishedPulling="2025-10-07 08:12:01.363949385 +0000 UTC m=+946.323719928" observedRunningTime="2025-10-07 08:12:07.111815778 +0000 UTC m=+952.071586331" watchObservedRunningTime="2025-10-07 08:12:07.117595203 +0000 UTC m=+952.077365746" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.146291 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-6rsbk" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.239383 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.258203 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.259758 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.267368 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.268974 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.269030 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.269059 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhmps\" (UniqueName: \"kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.269095 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.288817 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373525 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373592 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqhqj\" (UniqueName: \"kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373623 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373641 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373667 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373702 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhmps\" (UniqueName: \"kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373745 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373777 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.373802 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.374584 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.374859 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.374999 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb\") 
pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.392002 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhmps\" (UniqueName: \"kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps\") pod \"dnsmasq-dns-6bc7876d45-s9gjw\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.419164 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.475044 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqhqj\" (UniqueName: \"kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.475115 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.475145 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.476258 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.476387 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.476569 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.476598 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.477263 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.477560 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.499912 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqhqj\" (UniqueName: \"kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj\") pod \"dnsmasq-dns-8554648995-lwp8c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.631989 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6rsbk"] Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.661834 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.696362 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:07 crc kubenswrapper[4875]: W1007 08:12:07.733642 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode984b1d0_f011_46a5_8339_966f44e3c603.slice/crio-de747d9426219db7f6d4f03a02a7664852683f5342a9bccbc174d7dec4632bc0 WatchSource:0}: Error finding container de747d9426219db7f6d4f03a02a7664852683f5342a9bccbc174d7dec4632bc0: Status 404 returned error can't find the container with id de747d9426219db7f6d4f03a02a7664852683f5342a9bccbc174d7dec4632bc0 Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.814363 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.814404 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.831283 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 07 08:12:07 crc kubenswrapper[4875]: I1007 08:12:07.831345 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.028585 4875 generic.go:334] "Generic (PLEG): container finished" podID="12c7ef4c-d13d-4813-9e2f-37197f79207e" containerID="b53413e7e8db1cc219ffea2b9a6a2015b0d36faaf48310e19d25e9bfb6b3af8e" exitCode=0 Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.028969 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" event={"ID":"12c7ef4c-d13d-4813-9e2f-37197f79207e","Type":"ContainerDied","Data":"b53413e7e8db1cc219ffea2b9a6a2015b0d36faaf48310e19d25e9bfb6b3af8e"} Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.031433 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerStarted","Data":"74829d972ae0ba9f879c590cdb2f186562e49f7a189fcb7398f145b5eb9ec846"} Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.036413 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6rsbk" event={"ID":"e984b1d0-f011-46a5-8339-966f44e3c603","Type":"ContainerStarted","Data":"de747d9426219db7f6d4f03a02a7664852683f5342a9bccbc174d7dec4632bc0"} Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.038051 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerStarted","Data":"5f71368c43c156d25b2ce09ab20d0658f6aa74db195a97c64816049fe1afdd14"} Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.039916 4875 generic.go:334] "Generic (PLEG): container finished" podID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" containerID="0a2c9c1424ee00ce05dc33ea7732bdfa24caa300aa1e151c387e990dfdce5422" exitCode=0 Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.040244 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" event={"ID":"13763859-5bdc-495b-b344-b2b8e0b7fd1e","Type":"ContainerDied","Data":"0a2c9c1424ee00ce05dc33ea7732bdfa24caa300aa1e151c387e990dfdce5422"} Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.107179 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-6rsbk" podStartSLOduration=2.107162697 podStartE2EDuration="2.107162697s" podCreationTimestamp="2025-10-07 08:12:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:08.105791844 +0000 UTC m=+953.065562387" watchObservedRunningTime="2025-10-07 08:12:08.107162697 +0000 UTC m=+953.066933240" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.125861 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.272779 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.351244 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.418938 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.450504 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.469184 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.482605 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.612771 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config\") pod \"12c7ef4c-d13d-4813-9e2f-37197f79207e\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.612842 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc\") pod \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.612904 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6w99\" (UniqueName: \"kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99\") pod \"12c7ef4c-d13d-4813-9e2f-37197f79207e\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.612965 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc\") pod \"12c7ef4c-d13d-4813-9e2f-37197f79207e\" (UID: \"12c7ef4c-d13d-4813-9e2f-37197f79207e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.613013 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvnch\" (UniqueName: \"kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch\") pod \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.613114 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config\") pod \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\" (UID: \"13763859-5bdc-495b-b344-b2b8e0b7fd1e\") " Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.619100 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99" (OuterVolumeSpecName: "kube-api-access-q6w99") pod "12c7ef4c-d13d-4813-9e2f-37197f79207e" (UID: "12c7ef4c-d13d-4813-9e2f-37197f79207e"). InnerVolumeSpecName "kube-api-access-q6w99". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.619440 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch" (OuterVolumeSpecName: "kube-api-access-bvnch") pod "13763859-5bdc-495b-b344-b2b8e0b7fd1e" (UID: "13763859-5bdc-495b-b344-b2b8e0b7fd1e"). InnerVolumeSpecName "kube-api-access-bvnch". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.632597 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config" (OuterVolumeSpecName: "config") pod "12c7ef4c-d13d-4813-9e2f-37197f79207e" (UID: "12c7ef4c-d13d-4813-9e2f-37197f79207e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.636085 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "12c7ef4c-d13d-4813-9e2f-37197f79207e" (UID: "12c7ef4c-d13d-4813-9e2f-37197f79207e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.637039 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "13763859-5bdc-495b-b344-b2b8e0b7fd1e" (UID: "13763859-5bdc-495b-b344-b2b8e0b7fd1e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.648005 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.650345 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config" (OuterVolumeSpecName: "config") pod "13763859-5bdc-495b-b344-b2b8e0b7fd1e" (UID: "13763859-5bdc-495b-b344-b2b8e0b7fd1e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.696569 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715632 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715687 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715735 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6w99\" (UniqueName: \"kubernetes.io/projected/12c7ef4c-d13d-4813-9e2f-37197f79207e-kube-api-access-q6w99\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715759 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c7ef4c-d13d-4813-9e2f-37197f79207e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715776 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvnch\" (UniqueName: \"kubernetes.io/projected/13763859-5bdc-495b-b344-b2b8e0b7fd1e-kube-api-access-bvnch\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:08 crc kubenswrapper[4875]: I1007 08:12:08.715789 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13763859-5bdc-495b-b344-b2b8e0b7fd1e-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.060322 4875 generic.go:334] "Generic (PLEG): container finished" podID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerID="c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122" exitCode=0 Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.061484 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lwp8c" event={"ID":"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c","Type":"ContainerDied","Data":"c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.061522 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lwp8c" event={"ID":"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c","Type":"ContainerStarted","Data":"907e17110f29303e14f55c9953fc50be5e441e67982cb55416b3a29787c306f2"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.063540 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerID="02c26605091c626690101503921023d2b741fe3cf14cb4c3eb2ca762c25077d2" exitCode=0 Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.063760 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" event={"ID":"e2e77fdd-ebb0-4419-8981-7a70c201d833","Type":"ContainerDied","Data":"02c26605091c626690101503921023d2b741fe3cf14cb4c3eb2ca762c25077d2"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.063782 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" 
event={"ID":"e2e77fdd-ebb0-4419-8981-7a70c201d833","Type":"ContainerStarted","Data":"d8d450d0d327d6289774aa39b8453df4d61015c8a7d7329567a36b28cc9231d7"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.071661 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6rsbk" event={"ID":"e984b1d0-f011-46a5-8339-966f44e3c603","Type":"ContainerStarted","Data":"50a6fa06aa93b6bae6742969f6cdc51a14da5b6c4c55c65a377c6c1205439ec2"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.077600 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.077585 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qbh8c" event={"ID":"13763859-5bdc-495b-b344-b2b8e0b7fd1e","Type":"ContainerDied","Data":"44af87e8f257af7c4a676b6255f7dc5adb2c7be90b4a47345d284f17c9bb101d"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.078465 4875 scope.go:117] "RemoveContainer" containerID="0a2c9c1424ee00ce05dc33ea7732bdfa24caa300aa1e151c387e990dfdce5422" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.080081 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" event={"ID":"12c7ef4c-d13d-4813-9e2f-37197f79207e","Type":"ContainerDied","Data":"2d3664ae96c7e1ea6ebc14cd37af025e919e8bab6d24f3f7667cb0c9182a3ef4"} Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.080171 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-sx8tp" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.083809 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.144854 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.155490 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.211358 4875 scope.go:117] "RemoveContainer" containerID="b53413e7e8db1cc219ffea2b9a6a2015b0d36faaf48310e19d25e9bfb6b3af8e" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.262202 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.272830 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-sx8tp"] Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.286233 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.294144 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qbh8c"] Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.579202 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 07 08:12:09 crc kubenswrapper[4875]: E1007 08:12:09.579708 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.579733 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: 
E1007 08:12:09.579764 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.579773 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.580024 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.580049 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" containerName="init" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.581345 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.587851 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.588450 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.588767 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-rs7nn" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.589915 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.619062 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632155 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632236 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632294 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-config\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632352 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcdvb\" (UniqueName: \"kubernetes.io/projected/84eec933-bbc6-4961-940a-1a26f31d2fd3-kube-api-access-vcdvb\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632418 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632466 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.632536 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-scripts\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.710648 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12c7ef4c-d13d-4813-9e2f-37197f79207e" path="/var/lib/kubelet/pods/12c7ef4c-d13d-4813-9e2f-37197f79207e/volumes" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.711197 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13763859-5bdc-495b-b344-b2b8e0b7fd1e" path="/var/lib/kubelet/pods/13763859-5bdc-495b-b344-b2b8e0b7fd1e/volumes" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.734866 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.734971 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735039 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-scripts\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735130 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735170 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735219 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-config\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735276 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vcdvb\" (UniqueName: \"kubernetes.io/projected/84eec933-bbc6-4961-940a-1a26f31d2fd3-kube-api-access-vcdvb\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.735817 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.736016 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-scripts\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.736793 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84eec933-bbc6-4961-940a-1a26f31d2fd3-config\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.739251 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.739266 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.740472 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/84eec933-bbc6-4961-940a-1a26f31d2fd3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.758507 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcdvb\" (UniqueName: \"kubernetes.io/projected/84eec933-bbc6-4961-940a-1a26f31d2fd3-kube-api-access-vcdvb\") pod \"ovn-northd-0\" (UID: \"84eec933-bbc6-4961-940a-1a26f31d2fd3\") " pod="openstack/ovn-northd-0" Oct 07 08:12:09 crc kubenswrapper[4875]: I1007 08:12:09.902533 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.108191 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lwp8c" event={"ID":"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c","Type":"ContainerStarted","Data":"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4"} Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.108608 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.126099 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" event={"ID":"e2e77fdd-ebb0-4419-8981-7a70c201d833","Type":"ContainerStarted","Data":"a2a9961a9ec9b489a1ad5b756c084cfc8c19872d90369fd09fe8fee6470a0b44"} Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.129567 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.131906 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-lwp8c" podStartSLOduration=3.131886075 podStartE2EDuration="3.131886075s" podCreationTimestamp="2025-10-07 08:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:10.127651079 +0000 UTC m=+955.087421652" watchObservedRunningTime="2025-10-07 08:12:10.131886075 +0000 UTC m=+955.091656618" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.149489 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" podStartSLOduration=4.149458578 podStartE2EDuration="4.149458578s" podCreationTimestamp="2025-10-07 08:12:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:10.146989158 +0000 UTC m=+955.106759721" watchObservedRunningTime="2025-10-07 08:12:10.149458578 +0000 UTC m=+955.109229121" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.321559 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.362123 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.405408 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.406802 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.421532 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.457450 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.457529 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.457582 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzggs\" (UniqueName: \"kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.457738 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.457791 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: W1007 08:12:10.458639 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84eec933_bbc6_4961_940a_1a26f31d2fd3.slice/crio-cb6f13bf22a132830063a2ff3fb440372779e2dd7f3ba28db03654d64b0f7ed9 WatchSource:0}: Error finding container cb6f13bf22a132830063a2ff3fb440372779e2dd7f3ba28db03654d64b0f7ed9: Status 404 returned error can't find the container with id cb6f13bf22a132830063a2ff3fb440372779e2dd7f3ba28db03654d64b0f7ed9 Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.469799 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.559502 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.559562 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc\") pod 
\"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.559652 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.559675 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.559699 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzggs\" (UniqueName: \"kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.560998 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.561289 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.564230 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.564250 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.580298 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzggs\" (UniqueName: \"kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs\") pod \"dnsmasq-dns-b8fbc5445-crjg7\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:10 crc kubenswrapper[4875]: I1007 08:12:10.740897 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.161973 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"84eec933-bbc6-4961-940a-1a26f31d2fd3","Type":"ContainerStarted","Data":"cb6f13bf22a132830063a2ff3fb440372779e2dd7f3ba28db03654d64b0f7ed9"} Oct 07 08:12:11 crc kubenswrapper[4875]: W1007 08:12:11.203356 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16b4028b_5a2d_4e4e_85f7_a8af7e041875.slice/crio-e287334436cc740b51ba66a170d4644a7637b227db25cd79f5af94254c57ca26 WatchSource:0}: Error finding container e287334436cc740b51ba66a170d4644a7637b227db25cd79f5af94254c57ca26: Status 404 returned error can't find the container with id e287334436cc740b51ba66a170d4644a7637b227db25cd79f5af94254c57ca26 Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.205414 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.470038 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.476036 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.479146 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.479507 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.482569 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.489359 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-z5ndl" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.491681 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.582415 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmnbj\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-kube-api-access-mmnbj\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.582503 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.582582 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-cache\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.582644 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-lock\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.582693 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.684272 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.684336 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-cache\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.684376 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-lock\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.684432 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.684484 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmnbj\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-kube-api-access-mmnbj\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.685170 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.685543 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-cache\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.685834 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/97048952-fbe0-46f2-8163-00ec9381508b-lock\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: E1007 08:12:11.685975 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:11 crc 
kubenswrapper[4875]: E1007 08:12:11.685999 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:11 crc kubenswrapper[4875]: E1007 08:12:11.686042 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. No retries permitted until 2025-10-07 08:12:12.186026245 +0000 UTC m=+957.145796788 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.718797 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmnbj\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-kube-api-access-mmnbj\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.722054 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.914907 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 07 08:12:11 crc kubenswrapper[4875]: I1007 08:12:11.964691 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="027e3c4c-1861-4933-9d30-636575099b5c" containerName="galera" probeResult="failure" output=< Oct 07 08:12:11 crc kubenswrapper[4875]: wsrep_local_state_comment (Joined) differs from Synced Oct 07 08:12:11 crc kubenswrapper[4875]: > Oct 07 08:12:12 crc kubenswrapper[4875]: I1007 08:12:12.169723 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" event={"ID":"16b4028b-5a2d-4e4e-85f7-a8af7e041875","Type":"ContainerStarted","Data":"e287334436cc740b51ba66a170d4644a7637b227db25cd79f5af94254c57ca26"} Oct 07 08:12:12 crc kubenswrapper[4875]: I1007 08:12:12.170033 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="dnsmasq-dns" containerID="cri-o://a2a9961a9ec9b489a1ad5b756c084cfc8c19872d90369fd09fe8fee6470a0b44" gracePeriod=10 Oct 07 08:12:12 crc kubenswrapper[4875]: I1007 08:12:12.192910 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:12 crc kubenswrapper[4875]: E1007 08:12:12.193111 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:12 crc kubenswrapper[4875]: E1007 08:12:12.193152 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:12 crc kubenswrapper[4875]: E1007 
08:12:12.193230 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. No retries permitted until 2025-10-07 08:12:13.193206694 +0000 UTC m=+958.152977237 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:13 crc kubenswrapper[4875]: I1007 08:12:13.185325 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerID="a2a9961a9ec9b489a1ad5b756c084cfc8c19872d90369fd09fe8fee6470a0b44" exitCode=0 Oct 07 08:12:13 crc kubenswrapper[4875]: I1007 08:12:13.185404 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" event={"ID":"e2e77fdd-ebb0-4419-8981-7a70c201d833","Type":"ContainerDied","Data":"a2a9961a9ec9b489a1ad5b756c084cfc8c19872d90369fd09fe8fee6470a0b44"} Oct 07 08:12:13 crc kubenswrapper[4875]: I1007 08:12:13.211789 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:13 crc kubenswrapper[4875]: E1007 08:12:13.212013 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:13 crc kubenswrapper[4875]: E1007 08:12:13.212058 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:13 crc kubenswrapper[4875]: E1007 08:12:13.212126 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. No retries permitted until 2025-10-07 08:12:15.212107067 +0000 UTC m=+960.171877610 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.267541 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:15 crc kubenswrapper[4875]: E1007 08:12:15.267805 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:15 crc kubenswrapper[4875]: E1007 08:12:15.267948 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:15 crc kubenswrapper[4875]: E1007 08:12:15.268009 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. 
No retries permitted until 2025-10-07 08:12:19.267993474 +0000 UTC m=+964.227764017 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.456206 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-7bh99"] Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.458076 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.460418 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.460427 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.460705 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.466039 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7bh99"] Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.471773 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.471863 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.471910 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.471941 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.472099 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.472130 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8f7f\" 
(UniqueName: \"kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.472165 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573659 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573705 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573731 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573828 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573850 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8f7f\" (UniqueName: \"kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573910 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.573970 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.574224 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.575133 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.575167 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.580774 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.581275 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.587797 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.600138 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8f7f\" (UniqueName: \"kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f\") pod \"swift-ring-rebalance-7bh99\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.786787 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-z5ndl" Oct 07 08:12:15 crc kubenswrapper[4875]: I1007 08:12:15.795342 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:16 crc kubenswrapper[4875]: I1007 08:12:16.370506 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7bh99"] Oct 07 08:12:16 crc kubenswrapper[4875]: W1007 08:12:16.375955 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4ed1841_ffa8_4d3b_8a66_43221118d007.slice/crio-1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056 WatchSource:0}: Error finding container 1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056: Status 404 returned error can't find the container with id 1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056 Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.217494 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7bh99" event={"ID":"e4ed1841-ffa8-4d3b-8a66-43221118d007","Type":"ContainerStarted","Data":"1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056"} Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.219650 4875 generic.go:334] "Generic (PLEG): container finished" podID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerID="53acb5efdebdce4507eddff69515a368553ea6632204aebc6a8993b4d20fe71a" exitCode=0 Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.219704 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" event={"ID":"16b4028b-5a2d-4e4e-85f7-a8af7e041875","Type":"ContainerDied","Data":"53acb5efdebdce4507eddff69515a368553ea6632204aebc6a8993b4d20fe71a"} Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.512824 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.529751 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhmps\" (UniqueName: \"kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps\") pod \"e2e77fdd-ebb0-4419-8981-7a70c201d833\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.529935 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config\") pod \"e2e77fdd-ebb0-4419-8981-7a70c201d833\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.529963 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb\") pod \"e2e77fdd-ebb0-4419-8981-7a70c201d833\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.530052 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc\") pod \"e2e77fdd-ebb0-4419-8981-7a70c201d833\" (UID: \"e2e77fdd-ebb0-4419-8981-7a70c201d833\") " Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.539019 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps" (OuterVolumeSpecName: "kube-api-access-vhmps") pod 
"e2e77fdd-ebb0-4419-8981-7a70c201d833" (UID: "e2e77fdd-ebb0-4419-8981-7a70c201d833"). InnerVolumeSpecName "kube-api-access-vhmps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.583297 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config" (OuterVolumeSpecName: "config") pod "e2e77fdd-ebb0-4419-8981-7a70c201d833" (UID: "e2e77fdd-ebb0-4419-8981-7a70c201d833"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.587328 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e2e77fdd-ebb0-4419-8981-7a70c201d833" (UID: "e2e77fdd-ebb0-4419-8981-7a70c201d833"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.604466 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e2e77fdd-ebb0-4419-8981-7a70c201d833" (UID: "e2e77fdd-ebb0-4419-8981-7a70c201d833"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.632256 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.632549 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.632644 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2e77fdd-ebb0-4419-8981-7a70c201d833-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.632798 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhmps\" (UniqueName: \"kubernetes.io/projected/e2e77fdd-ebb0-4419-8981-7a70c201d833-kube-api-access-vhmps\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.710944 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:17 crc kubenswrapper[4875]: I1007 08:12:17.896169 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.237011 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" event={"ID":"16b4028b-5a2d-4e4e-85f7-a8af7e041875","Type":"ContainerStarted","Data":"f4248a5ea8143bd48f696e198b89334c6dabfeff3ffc52b58b6bb604fc388db1"} Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.238195 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.245574 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.246547 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-s9gjw" event={"ID":"e2e77fdd-ebb0-4419-8981-7a70c201d833","Type":"ContainerDied","Data":"d8d450d0d327d6289774aa39b8453df4d61015c8a7d7329567a36b28cc9231d7"} Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.246612 4875 scope.go:117] "RemoveContainer" containerID="a2a9961a9ec9b489a1ad5b756c084cfc8c19872d90369fd09fe8fee6470a0b44" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.266608 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" podStartSLOduration=8.266587444 podStartE2EDuration="8.266587444s" podCreationTimestamp="2025-10-07 08:12:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:18.261610665 +0000 UTC m=+963.221381218" watchObservedRunningTime="2025-10-07 08:12:18.266587444 +0000 UTC m=+963.226357987" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.306798 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-zh4xd"] Oct 07 08:12:18 crc kubenswrapper[4875]: E1007 08:12:18.310822 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="dnsmasq-dns" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.310854 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="dnsmasq-dns" Oct 07 08:12:18 crc kubenswrapper[4875]: E1007 08:12:18.310923 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="init" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.310933 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="init" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.311135 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" containerName="dnsmasq-dns" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.311796 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.316061 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-zh4xd"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.331670 4875 scope.go:117] "RemoveContainer" containerID="02c26605091c626690101503921023d2b741fe3cf14cb4c3eb2ca762c25077d2" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.349456 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8bj8\" (UniqueName: \"kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8\") pod \"placement-db-create-zh4xd\" (UID: \"e5431993-98ef-4f56-ba68-210402d2be39\") " pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.357808 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.366199 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-s9gjw"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.451311 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8bj8\" (UniqueName: \"kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8\") pod \"placement-db-create-zh4xd\" (UID: \"e5431993-98ef-4f56-ba68-210402d2be39\") " pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.472183 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8bj8\" (UniqueName: \"kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8\") pod \"placement-db-create-zh4xd\" (UID: \"e5431993-98ef-4f56-ba68-210402d2be39\") " pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.552415 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-5dzb8"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.556592 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.565810 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5dzb8"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.632304 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.656183 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcmrw\" (UniqueName: \"kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw\") pod \"glance-db-create-5dzb8\" (UID: \"4fd749a4-1003-4a72-b637-98af9a1ff9f3\") " pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.757636 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcmrw\" (UniqueName: \"kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw\") pod \"glance-db-create-5dzb8\" (UID: \"4fd749a4-1003-4a72-b637-98af9a1ff9f3\") " pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.780376 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcmrw\" (UniqueName: \"kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw\") pod \"glance-db-create-5dzb8\" (UID: \"4fd749a4-1003-4a72-b637-98af9a1ff9f3\") " pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.896291 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-zh4xd"] Oct 07 08:12:18 crc kubenswrapper[4875]: I1007 08:12:18.929435 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.282318 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:19 crc kubenswrapper[4875]: E1007 08:12:19.282631 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:19 crc kubenswrapper[4875]: E1007 08:12:19.282657 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:19 crc kubenswrapper[4875]: E1007 08:12:19.282725 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. No retries permitted until 2025-10-07 08:12:27.282697998 +0000 UTC m=+972.242468541 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.364532 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"84eec933-bbc6-4961-940a-1a26f31d2fd3","Type":"ContainerStarted","Data":"61ebae41a01bd54ddf68b8a5f259b2364bbea6a313b077f65665a114fc88d236"} Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.364584 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"84eec933-bbc6-4961-940a-1a26f31d2fd3","Type":"ContainerStarted","Data":"8a6d2c78d6a38a5f1b505de32a5407d4b5dc3431c138859be76154bae7cf8cb4"} Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.365034 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.393442 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.709711813 podStartE2EDuration="10.393417111s" podCreationTimestamp="2025-10-07 08:12:09 +0000 UTC" firstStartedPulling="2025-10-07 08:12:10.466654257 +0000 UTC m=+955.426424800" lastFinishedPulling="2025-10-07 08:12:18.150359545 +0000 UTC m=+963.110130098" observedRunningTime="2025-10-07 08:12:19.390305452 +0000 UTC m=+964.350075995" watchObservedRunningTime="2025-10-07 08:12:19.393417111 +0000 UTC m=+964.353187654" Oct 07 08:12:19 crc kubenswrapper[4875]: I1007 08:12:19.710975 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e77fdd-ebb0-4419-8981-7a70c201d833" path="/var/lib/kubelet/pods/e2e77fdd-ebb0-4419-8981-7a70c201d833/volumes" Oct 07 08:12:19 crc kubenswrapper[4875]: W1007 08:12:19.884406 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5431993_98ef_4f56_ba68_210402d2be39.slice/crio-06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c WatchSource:0}: Error finding container 06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c: Status 404 returned error can't find the container with id 06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c Oct 07 08:12:20 crc kubenswrapper[4875]: I1007 08:12:20.015260 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 07 08:12:20 crc kubenswrapper[4875]: I1007 08:12:20.076068 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 07 08:12:20 crc kubenswrapper[4875]: I1007 08:12:20.392089 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-zh4xd" event={"ID":"e5431993-98ef-4f56-ba68-210402d2be39","Type":"ContainerStarted","Data":"06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c"} Oct 07 08:12:21 crc kubenswrapper[4875]: I1007 08:12:21.487870 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5dzb8"] Oct 07 08:12:21 crc kubenswrapper[4875]: W1007 08:12:21.500998 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fd749a4_1003_4a72_b637_98af9a1ff9f3.slice/crio-d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18 
WatchSource:0}: Error finding container d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18: Status 404 returned error can't find the container with id d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18 Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.413013 4875 generic.go:334] "Generic (PLEG): container finished" podID="4fd749a4-1003-4a72-b637-98af9a1ff9f3" containerID="a6b71bbffe51529dbd34c3083681a4cfd4ce8ca812d4b3936851251d83171151" exitCode=0 Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.413097 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5dzb8" event={"ID":"4fd749a4-1003-4a72-b637-98af9a1ff9f3","Type":"ContainerDied","Data":"a6b71bbffe51529dbd34c3083681a4cfd4ce8ca812d4b3936851251d83171151"} Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.413131 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5dzb8" event={"ID":"4fd749a4-1003-4a72-b637-98af9a1ff9f3","Type":"ContainerStarted","Data":"d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18"} Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.415308 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7bh99" event={"ID":"e4ed1841-ffa8-4d3b-8a66-43221118d007","Type":"ContainerStarted","Data":"1c6d7731d7350f8f4ecb7d744787cbfa0ce5ea8fe19c2472b77ccfab16305fba"} Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.417053 4875 generic.go:334] "Generic (PLEG): container finished" podID="e5431993-98ef-4f56-ba68-210402d2be39" containerID="a11ea7ea99e1eebae9e175718d1b4ce6a140230b9daa178b6d6617af6ba118e8" exitCode=0 Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.417086 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-zh4xd" event={"ID":"e5431993-98ef-4f56-ba68-210402d2be39","Type":"ContainerDied","Data":"a11ea7ea99e1eebae9e175718d1b4ce6a140230b9daa178b6d6617af6ba118e8"} Oct 07 08:12:22 crc kubenswrapper[4875]: I1007 08:12:22.473976 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-7bh99" podStartSLOduration=2.6080263009999998 podStartE2EDuration="7.473956573s" podCreationTimestamp="2025-10-07 08:12:15 +0000 UTC" firstStartedPulling="2025-10-07 08:12:16.380977878 +0000 UTC m=+961.340748431" lastFinishedPulling="2025-10-07 08:12:21.24690815 +0000 UTC m=+966.206678703" observedRunningTime="2025-10-07 08:12:22.457530277 +0000 UTC m=+967.417300820" watchObservedRunningTime="2025-10-07 08:12:22.473956573 +0000 UTC m=+967.433727196" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.797486 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.804402 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.892159 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8bj8\" (UniqueName: \"kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8\") pod \"e5431993-98ef-4f56-ba68-210402d2be39\" (UID: \"e5431993-98ef-4f56-ba68-210402d2be39\") " Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.892296 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcmrw\" (UniqueName: \"kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw\") pod \"4fd749a4-1003-4a72-b637-98af9a1ff9f3\" (UID: \"4fd749a4-1003-4a72-b637-98af9a1ff9f3\") " Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.899797 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw" (OuterVolumeSpecName: "kube-api-access-zcmrw") pod "4fd749a4-1003-4a72-b637-98af9a1ff9f3" (UID: "4fd749a4-1003-4a72-b637-98af9a1ff9f3"). InnerVolumeSpecName "kube-api-access-zcmrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.899864 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8" (OuterVolumeSpecName: "kube-api-access-w8bj8") pod "e5431993-98ef-4f56-ba68-210402d2be39" (UID: "e5431993-98ef-4f56-ba68-210402d2be39"). InnerVolumeSpecName "kube-api-access-w8bj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.995295 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcmrw\" (UniqueName: \"kubernetes.io/projected/4fd749a4-1003-4a72-b637-98af9a1ff9f3-kube-api-access-zcmrw\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:23 crc kubenswrapper[4875]: I1007 08:12:23.995365 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8bj8\" (UniqueName: \"kubernetes.io/projected/e5431993-98ef-4f56-ba68-210402d2be39-kube-api-access-w8bj8\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.436523 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-zh4xd" event={"ID":"e5431993-98ef-4f56-ba68-210402d2be39","Type":"ContainerDied","Data":"06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c"} Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.436848 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06db3bd2d971667820fc633a23195a307eadbd2d10b5007a43a0dd752ae54b2c" Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.436580 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-zh4xd" Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.438171 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5dzb8" event={"ID":"4fd749a4-1003-4a72-b637-98af9a1ff9f3","Type":"ContainerDied","Data":"d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18"} Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.438284 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d764beed72e439cf9624b8f6fb7403169d997d95828738486c3697494b1c9d18" Oct 07 08:12:24 crc kubenswrapper[4875]: I1007 08:12:24.438189 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5dzb8" Oct 07 08:12:25 crc kubenswrapper[4875]: I1007 08:12:25.743053 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:12:25 crc kubenswrapper[4875]: I1007 08:12:25.822960 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:25 crc kubenswrapper[4875]: I1007 08:12:25.823242 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-lwp8c" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="dnsmasq-dns" containerID="cri-o://71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4" gracePeriod=10 Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.348316 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.448256 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb\") pod \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.448318 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config\") pod \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.448389 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqhqj\" (UniqueName: \"kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj\") pod \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.448488 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc\") pod \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.448557 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb\") pod \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\" (UID: \"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c\") " Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.473164 4875 generic.go:334] "Generic (PLEG): container finished" 
podID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerID="71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4" exitCode=0 Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.473219 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lwp8c" event={"ID":"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c","Type":"ContainerDied","Data":"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4"} Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.473252 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-lwp8c" event={"ID":"9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c","Type":"ContainerDied","Data":"907e17110f29303e14f55c9953fc50be5e441e67982cb55416b3a29787c306f2"} Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.473272 4875 scope.go:117] "RemoveContainer" containerID="71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.473266 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-lwp8c" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.481158 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj" (OuterVolumeSpecName: "kube-api-access-xqhqj") pod "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" (UID: "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c"). InnerVolumeSpecName "kube-api-access-xqhqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.495592 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" (UID: "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.495096 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config" (OuterVolumeSpecName: "config") pod "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" (UID: "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.497380 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" (UID: "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.506368 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" (UID: "9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.550851 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.551318 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.551336 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqhqj\" (UniqueName: \"kubernetes.io/projected/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-kube-api-access-xqhqj\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.551352 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.551364 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.559208 4875 scope.go:117] "RemoveContainer" containerID="c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.593685 4875 scope.go:117] "RemoveContainer" containerID="71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4" Oct 07 08:12:26 crc kubenswrapper[4875]: E1007 08:12:26.594255 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4\": container with ID starting with 71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4 not found: ID does not exist" containerID="71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.594290 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4"} err="failed to get container status \"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4\": rpc error: code = NotFound desc = could not find container \"71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4\": container with ID starting with 71aa8220f7c4b799545b0b8e62de0a98225351af2766cd97cf3bc610ff0ecba4 not found: ID does not exist" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.594321 4875 scope.go:117] "RemoveContainer" containerID="c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122" Oct 07 08:12:26 crc kubenswrapper[4875]: E1007 08:12:26.597002 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122\": container with ID starting with c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122 not found: ID does not exist" containerID="c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.597033 4875 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122"} err="failed to get container status \"c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122\": rpc error: code = NotFound desc = could not find container \"c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122\": container with ID starting with c4d99d35882e8d71109f7d7dec088f33215e61df809a11230c387abbd1874122 not found: ID does not exist" Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.803584 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:26 crc kubenswrapper[4875]: I1007 08:12:26.816325 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-lwp8c"] Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.371487 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.371763 4875 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.371807 4875 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.371900 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift podName:97048952-fbe0-46f2-8163-00ec9381508b nodeName:}" failed. No retries permitted until 2025-10-07 08:12:43.371854418 +0000 UTC m=+988.331624981 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift") pod "swift-storage-0" (UID: "97048952-fbe0-46f2-8163-00ec9381508b") : configmap "swift-ring-files" not found Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.708465 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" path="/var/lib/kubelet/pods/9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c/volumes" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.893142 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-2cw6b"] Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.893703 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="init" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.893726 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="init" Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.893743 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fd749a4-1003-4a72-b637-98af9a1ff9f3" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.893754 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fd749a4-1003-4a72-b637-98af9a1ff9f3" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.893779 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="dnsmasq-dns" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.893791 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="dnsmasq-dns" Oct 07 08:12:27 crc kubenswrapper[4875]: E1007 08:12:27.893805 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5431993-98ef-4f56-ba68-210402d2be39" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.893818 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5431993-98ef-4f56-ba68-210402d2be39" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.894103 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e6f843e-4ba3-46ef-b9ee-2ced2a5e201c" containerName="dnsmasq-dns" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.894140 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5431993-98ef-4f56-ba68-210402d2be39" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.894169 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fd749a4-1003-4a72-b637-98af9a1ff9f3" containerName="mariadb-database-create" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.895241 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.910438 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2cw6b"] Oct 07 08:12:27 crc kubenswrapper[4875]: I1007 08:12:27.983393 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk7bz\" (UniqueName: \"kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz\") pod \"keystone-db-create-2cw6b\" (UID: \"1f4af0be-71a8-464b-90d9-909fcbe4cf80\") " pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.084759 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk7bz\" (UniqueName: \"kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz\") pod \"keystone-db-create-2cw6b\" (UID: \"1f4af0be-71a8-464b-90d9-909fcbe4cf80\") " pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.106998 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk7bz\" (UniqueName: \"kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz\") pod \"keystone-db-create-2cw6b\" (UID: \"1f4af0be-71a8-464b-90d9-909fcbe4cf80\") " pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.252002 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.498694 4875 generic.go:334] "Generic (PLEG): container finished" podID="e4ed1841-ffa8-4d3b-8a66-43221118d007" containerID="1c6d7731d7350f8f4ecb7d744787cbfa0ce5ea8fe19c2472b77ccfab16305fba" exitCode=0 Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.498758 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7bh99" event={"ID":"e4ed1841-ffa8-4d3b-8a66-43221118d007","Type":"ContainerDied","Data":"1c6d7731d7350f8f4ecb7d744787cbfa0ce5ea8fe19c2472b77ccfab16305fba"} Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.665575 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8477-account-create-7z2lk"] Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.667208 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.670125 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.679931 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8477-account-create-7z2lk"] Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.720357 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2cw6b"] Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.801179 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72jzz\" (UniqueName: \"kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz\") pod \"glance-8477-account-create-7z2lk\" (UID: \"f2a895c5-0206-4d29-b1e8-ef70102ec8bd\") " pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.902805 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72jzz\" (UniqueName: \"kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz\") pod \"glance-8477-account-create-7z2lk\" (UID: \"f2a895c5-0206-4d29-b1e8-ef70102ec8bd\") " pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.921000 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72jzz\" (UniqueName: \"kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz\") pod \"glance-8477-account-create-7z2lk\" (UID: \"f2a895c5-0206-4d29-b1e8-ef70102ec8bd\") " pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:28 crc kubenswrapper[4875]: I1007 08:12:28.987995 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.422682 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8477-account-create-7z2lk"] Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.507512 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8477-account-create-7z2lk" event={"ID":"f2a895c5-0206-4d29-b1e8-ef70102ec8bd","Type":"ContainerStarted","Data":"7476629deb283081a24e7003195acc21f726cf7194c5b482a6359ef4e38f90d0"} Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.509668 4875 generic.go:334] "Generic (PLEG): container finished" podID="1f4af0be-71a8-464b-90d9-909fcbe4cf80" containerID="9d5172e3eb8ff1d18717a04fd657a3a810ea415aeb50644ee62d5d081a2d2aa9" exitCode=0 Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.509748 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2cw6b" event={"ID":"1f4af0be-71a8-464b-90d9-909fcbe4cf80","Type":"ContainerDied","Data":"9d5172e3eb8ff1d18717a04fd657a3a810ea415aeb50644ee62d5d081a2d2aa9"} Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.509785 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2cw6b" event={"ID":"1f4af0be-71a8-464b-90d9-909fcbe4cf80","Type":"ContainerStarted","Data":"74df61e1c2fca66e2e6e9663d99015ce147c9a14c8322971731d2adc00eafecd"} Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.858116 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:29 crc kubenswrapper[4875]: I1007 08:12:29.966102 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.036217 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8f7f\" (UniqueName: \"kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.036824 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.036859 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.036933 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.037101 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.037179 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.037252 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf\") pod \"e4ed1841-ffa8-4d3b-8a66-43221118d007\" (UID: \"e4ed1841-ffa8-4d3b-8a66-43221118d007\") " Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.039039 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.039867 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.058297 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f" (OuterVolumeSpecName: "kube-api-access-t8f7f") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "kube-api-access-t8f7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.065699 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.077565 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.078033 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.080404 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts" (OuterVolumeSpecName: "scripts") pod "e4ed1841-ffa8-4d3b-8a66-43221118d007" (UID: "e4ed1841-ffa8-4d3b-8a66-43221118d007"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140488 4875 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140527 4875 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140543 4875 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140555 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8f7f\" (UniqueName: \"kubernetes.io/projected/e4ed1841-ffa8-4d3b-8a66-43221118d007-kube-api-access-t8f7f\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140567 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4ed1841-ffa8-4d3b-8a66-43221118d007-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140579 4875 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e4ed1841-ffa8-4d3b-8a66-43221118d007-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.140590 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4ed1841-ffa8-4d3b-8a66-43221118d007-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.520642 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7bh99" event={"ID":"e4ed1841-ffa8-4d3b-8a66-43221118d007","Type":"ContainerDied","Data":"1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056"} Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.520703 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ff3d0df4300d2f12a2905f1727f68a9833afc943aa84d260c5b8a6a308b5056" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.521138 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7bh99" Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.523118 4875 generic.go:334] "Generic (PLEG): container finished" podID="f2a895c5-0206-4d29-b1e8-ef70102ec8bd" containerID="535c8ae83c7e60d6cc18ef9d9c98aa0b4e125ca84130b7c621a2defb1f269c5f" exitCode=0 Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.523185 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8477-account-create-7z2lk" event={"ID":"f2a895c5-0206-4d29-b1e8-ef70102ec8bd","Type":"ContainerDied","Data":"535c8ae83c7e60d6cc18ef9d9c98aa0b4e125ca84130b7c621a2defb1f269c5f"} Oct 07 08:12:30 crc kubenswrapper[4875]: I1007 08:12:30.977017 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.058960 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk7bz\" (UniqueName: \"kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz\") pod \"1f4af0be-71a8-464b-90d9-909fcbe4cf80\" (UID: \"1f4af0be-71a8-464b-90d9-909fcbe4cf80\") " Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.064200 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz" (OuterVolumeSpecName: "kube-api-access-sk7bz") pod "1f4af0be-71a8-464b-90d9-909fcbe4cf80" (UID: "1f4af0be-71a8-464b-90d9-909fcbe4cf80"). InnerVolumeSpecName "kube-api-access-sk7bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.161538 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk7bz\" (UniqueName: \"kubernetes.io/projected/1f4af0be-71a8-464b-90d9-909fcbe4cf80-kube-api-access-sk7bz\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.541183 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2cw6b" Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.541220 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2cw6b" event={"ID":"1f4af0be-71a8-464b-90d9-909fcbe4cf80","Type":"ContainerDied","Data":"74df61e1c2fca66e2e6e9663d99015ce147c9a14c8322971731d2adc00eafecd"} Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.541293 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74df61e1c2fca66e2e6e9663d99015ce147c9a14c8322971731d2adc00eafecd" Oct 07 08:12:31 crc kubenswrapper[4875]: I1007 08:12:31.982007 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.184979 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72jzz\" (UniqueName: \"kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz\") pod \"f2a895c5-0206-4d29-b1e8-ef70102ec8bd\" (UID: \"f2a895c5-0206-4d29-b1e8-ef70102ec8bd\") " Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.188600 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz" (OuterVolumeSpecName: "kube-api-access-72jzz") pod "f2a895c5-0206-4d29-b1e8-ef70102ec8bd" (UID: "f2a895c5-0206-4d29-b1e8-ef70102ec8bd"). InnerVolumeSpecName "kube-api-access-72jzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.287179 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72jzz\" (UniqueName: \"kubernetes.io/projected/f2a895c5-0206-4d29-b1e8-ef70102ec8bd-kube-api-access-72jzz\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.550424 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8477-account-create-7z2lk" event={"ID":"f2a895c5-0206-4d29-b1e8-ef70102ec8bd","Type":"ContainerDied","Data":"7476629deb283081a24e7003195acc21f726cf7194c5b482a6359ef4e38f90d0"} Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.550742 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7476629deb283081a24e7003195acc21f726cf7194c5b482a6359ef4e38f90d0" Oct 07 08:12:32 crc kubenswrapper[4875]: I1007 08:12:32.550504 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8477-account-create-7z2lk" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.804806 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-frvnr"] Oct 07 08:12:33 crc kubenswrapper[4875]: E1007 08:12:33.805219 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4ed1841-ffa8-4d3b-8a66-43221118d007" containerName="swift-ring-rebalance" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805230 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4ed1841-ffa8-4d3b-8a66-43221118d007" containerName="swift-ring-rebalance" Oct 07 08:12:33 crc kubenswrapper[4875]: E1007 08:12:33.805239 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4af0be-71a8-464b-90d9-909fcbe4cf80" containerName="mariadb-database-create" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805245 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4af0be-71a8-464b-90d9-909fcbe4cf80" containerName="mariadb-database-create" Oct 07 08:12:33 crc kubenswrapper[4875]: E1007 08:12:33.805256 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a895c5-0206-4d29-b1e8-ef70102ec8bd" containerName="mariadb-account-create" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805262 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a895c5-0206-4d29-b1e8-ef70102ec8bd" containerName="mariadb-account-create" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805419 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4af0be-71a8-464b-90d9-909fcbe4cf80" containerName="mariadb-database-create" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805430 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a895c5-0206-4d29-b1e8-ef70102ec8bd" containerName="mariadb-account-create" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805441 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4ed1841-ffa8-4d3b-8a66-43221118d007" containerName="swift-ring-rebalance" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.805998 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.808580 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ldtf9" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.812912 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.813000 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmbcq\" (UniqueName: \"kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.813027 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.813069 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.813208 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.825349 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-frvnr"] Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.847302 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-npwcn" podUID="97a62f1e-e3e0-4592-82a8-2524ba6df291" containerName="ovn-controller" probeResult="failure" output=< Oct 07 08:12:33 crc kubenswrapper[4875]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 07 08:12:33 crc kubenswrapper[4875]: > Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.858529 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.862507 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-vwbf5" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.914467 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.914900 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.915060 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmbcq\" (UniqueName: \"kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.915096 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.919798 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.920444 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.921402 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:33 crc kubenswrapper[4875]: I1007 08:12:33.932575 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmbcq\" (UniqueName: \"kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq\") pod \"glance-db-sync-frvnr\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.074939 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-npwcn-config-df5xs"] Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.076265 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.079093 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.087220 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn-config-df5xs"] Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.117716 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6pcq\" (UniqueName: \"kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.117772 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.117985 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.118113 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.118196 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.118293 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.140850 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.224089 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6pcq\" (UniqueName: \"kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.224463 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.224531 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.224780 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225085 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225145 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225258 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225271 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225379 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " 
pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.225799 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.227019 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.248796 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6pcq\" (UniqueName: \"kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq\") pod \"ovn-controller-npwcn-config-df5xs\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.438298 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.679404 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-frvnr"] Oct 07 08:12:34 crc kubenswrapper[4875]: W1007 08:12:34.693658 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod831e9d9a_5dbe_4bde_bba5_e5eec53bbe9d.slice/crio-1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb WatchSource:0}: Error finding container 1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb: Status 404 returned error can't find the container with id 1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb Oct 07 08:12:34 crc kubenswrapper[4875]: I1007 08:12:34.706681 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn-config-df5xs"] Oct 07 08:12:34 crc kubenswrapper[4875]: W1007 08:12:34.713922 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1580779_a5ae_40a8_81df_d4376262c613.slice/crio-52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284 WatchSource:0}: Error finding container 52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284: Status 404 returned error can't find the container with id 52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284 Oct 07 08:12:35 crc kubenswrapper[4875]: I1007 08:12:35.579677 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-frvnr" event={"ID":"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d","Type":"ContainerStarted","Data":"1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb"} Oct 07 08:12:35 crc kubenswrapper[4875]: I1007 08:12:35.583773 4875 generic.go:334] "Generic (PLEG): container finished" podID="f1580779-a5ae-40a8-81df-d4376262c613" containerID="2c6b6ca2b005ca9e1551328abf58d560788b5aba40a402db21a8ae3a2c65da1e" exitCode=0 Oct 07 08:12:35 crc kubenswrapper[4875]: I1007 08:12:35.583899 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-npwcn-config-df5xs" event={"ID":"f1580779-a5ae-40a8-81df-d4376262c613","Type":"ContainerDied","Data":"2c6b6ca2b005ca9e1551328abf58d560788b5aba40a402db21a8ae3a2c65da1e"} Oct 07 08:12:35 crc kubenswrapper[4875]: I1007 08:12:35.584167 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn-config-df5xs" event={"ID":"f1580779-a5ae-40a8-81df-d4376262c613","Type":"ContainerStarted","Data":"52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284"} Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.904121 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.977676 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.977903 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.977983 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.978057 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6pcq\" (UniqueName: \"kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.978089 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.978138 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn\") pod \"f1580779-a5ae-40a8-81df-d4376262c613\" (UID: \"f1580779-a5ae-40a8-81df-d4376262c613\") " Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.977784 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run" (OuterVolumeSpecName: "var-run") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.978439 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.980458 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.980615 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:36 crc kubenswrapper[4875]: I1007 08:12:36.981585 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts" (OuterVolumeSpecName: "scripts") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.000306 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq" (OuterVolumeSpecName: "kube-api-access-j6pcq") pod "f1580779-a5ae-40a8-81df-d4376262c613" (UID: "f1580779-a5ae-40a8-81df-d4376262c613"). InnerVolumeSpecName "kube-api-access-j6pcq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079373 4875 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079405 4875 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079413 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079422 4875 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f1580779-a5ae-40a8-81df-d4376262c613-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079432 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6pcq\" (UniqueName: \"kubernetes.io/projected/f1580779-a5ae-40a8-81df-d4376262c613-kube-api-access-j6pcq\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.079440 4875 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f1580779-a5ae-40a8-81df-d4376262c613-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.602133 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn-config-df5xs" event={"ID":"f1580779-a5ae-40a8-81df-d4376262c613","Type":"ContainerDied","Data":"52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284"} Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.602226 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52a4d141ffd614a5d9549a8c91434f60620e40d7bed5c5260478e440e6b42284" Oct 07 08:12:37 crc kubenswrapper[4875]: I1007 08:12:37.602502 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-df5xs" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.008260 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9c6a-account-create-xzc6c"] Oct 07 08:12:38 crc kubenswrapper[4875]: E1007 08:12:38.008652 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1580779-a5ae-40a8-81df-d4376262c613" containerName="ovn-config" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.008665 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1580779-a5ae-40a8-81df-d4376262c613" containerName="ovn-config" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.008831 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1580779-a5ae-40a8-81df-d4376262c613" containerName="ovn-config" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.009428 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.011813 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.025917 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c6a-account-create-xzc6c"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.038028 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-npwcn-config-df5xs"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.043286 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-npwcn-config-df5xs"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.100927 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-988zl\" (UniqueName: \"kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl\") pod \"keystone-9c6a-account-create-xzc6c\" (UID: \"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c\") " pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.123083 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-npwcn-config-xnf5w"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.127569 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.131379 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.135224 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn-config-xnf5w"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.202741 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-988zl\" (UniqueName: \"kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl\") pod \"keystone-9c6a-account-create-xzc6c\" (UID: \"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c\") " pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.220452 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-988zl\" (UniqueName: \"kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl\") pod \"keystone-9c6a-account-create-xzc6c\" (UID: \"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c\") " pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304372 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md9mz\" (UniqueName: \"kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304453 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 
08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304515 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304606 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304653 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.304685 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.305505 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-68b7-account-create-xpwdw"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.307499 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.310374 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.313722 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-68b7-account-create-xpwdw"] Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.330788 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407715 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407801 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407834 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407857 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.407912 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md9mz\" (UniqueName: \"kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.408276 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.408278 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.408916 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: 
\"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.410139 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.410244 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.436142 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md9mz\" (UniqueName: \"kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz\") pod \"ovn-controller-npwcn-config-xnf5w\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.452305 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.509371 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppzf2\" (UniqueName: \"kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2\") pod \"placement-68b7-account-create-xpwdw\" (UID: \"e50c9707-1542-4976-ac6b-97e240bc2a47\") " pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.617387 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppzf2\" (UniqueName: \"kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2\") pod \"placement-68b7-account-create-xpwdw\" (UID: \"e50c9707-1542-4976-ac6b-97e240bc2a47\") " pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.637424 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppzf2\" (UniqueName: \"kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2\") pod \"placement-68b7-account-create-xpwdw\" (UID: \"e50c9707-1542-4976-ac6b-97e240bc2a47\") " pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.711999 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-npwcn-config-xnf5w"] Oct 07 08:12:38 crc kubenswrapper[4875]: W1007 08:12:38.726320 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod712d4730_f99a_498a_93a4_e75ec563141a.slice/crio-81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a WatchSource:0}: Error finding container 81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a: Status 404 returned error can't find the container with id 81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.741779 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/ovn-controller-npwcn" Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.839115 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c6a-account-create-xzc6c"] Oct 07 08:12:38 crc kubenswrapper[4875]: W1007 08:12:38.852008 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcef4a289_8a9d_48de_bbc3_fa5c5a7e2e9c.slice/crio-59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97 WatchSource:0}: Error finding container 59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97: Status 404 returned error can't find the container with id 59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97 Oct 07 08:12:38 crc kubenswrapper[4875]: I1007 08:12:38.944407 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.450698 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-68b7-account-create-xpwdw"] Oct 07 08:12:39 crc kubenswrapper[4875]: W1007 08:12:39.462529 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode50c9707_1542_4976_ac6b_97e240bc2a47.slice/crio-de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772 WatchSource:0}: Error finding container de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772: Status 404 returned error can't find the container with id de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772 Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.627391 4875 generic.go:334] "Generic (PLEG): container finished" podID="712d4730-f99a-498a-93a4-e75ec563141a" containerID="6bd552c8dd28ca6b0e5a1af9046d0827447aba7150dd3cedd129f4d1583e8b0e" exitCode=0 Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.627643 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn-config-xnf5w" event={"ID":"712d4730-f99a-498a-93a4-e75ec563141a","Type":"ContainerDied","Data":"6bd552c8dd28ca6b0e5a1af9046d0827447aba7150dd3cedd129f4d1583e8b0e"} Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.627681 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn-config-xnf5w" event={"ID":"712d4730-f99a-498a-93a4-e75ec563141a","Type":"ContainerStarted","Data":"81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a"} Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.631091 4875 generic.go:334] "Generic (PLEG): container finished" podID="cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" containerID="02e6c6cc447c817cebb17e57f6cf13e32ab4ff374855f749e17e8ddd521007bc" exitCode=0 Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.631232 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c6a-account-create-xzc6c" event={"ID":"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c","Type":"ContainerDied","Data":"02e6c6cc447c817cebb17e57f6cf13e32ab4ff374855f749e17e8ddd521007bc"} Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.631308 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c6a-account-create-xzc6c" event={"ID":"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c","Type":"ContainerStarted","Data":"59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97"} Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.634392 4875 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-68b7-account-create-xpwdw" event={"ID":"e50c9707-1542-4976-ac6b-97e240bc2a47","Type":"ContainerStarted","Data":"de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772"} Oct 07 08:12:39 crc kubenswrapper[4875]: I1007 08:12:39.708507 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1580779-a5ae-40a8-81df-d4376262c613" path="/var/lib/kubelet/pods/f1580779-a5ae-40a8-81df-d4376262c613/volumes" Oct 07 08:12:39 crc kubenswrapper[4875]: E1007 08:12:39.946915 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode50c9707_1542_4976_ac6b_97e240bc2a47.slice/crio-7bbfa485bb990a2b4c13dcfc288cd81aa9bf1af4ba88f5e48d9462350d9b3326.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode50c9707_1542_4976_ac6b_97e240bc2a47.slice/crio-conmon-7bbfa485bb990a2b4c13dcfc288cd81aa9bf1af4ba88f5e48d9462350d9b3326.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice/crio-5f71368c43c156d25b2ce09ab20d0658f6aa74db195a97c64816049fe1afdd14.scope\": RecentStats: unable to find data in memory cache]" Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.646520 4875 generic.go:334] "Generic (PLEG): container finished" podID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerID="5f71368c43c156d25b2ce09ab20d0658f6aa74db195a97c64816049fe1afdd14" exitCode=0 Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.646893 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerDied","Data":"5f71368c43c156d25b2ce09ab20d0658f6aa74db195a97c64816049fe1afdd14"} Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.650851 4875 generic.go:334] "Generic (PLEG): container finished" podID="e50c9707-1542-4976-ac6b-97e240bc2a47" containerID="7bbfa485bb990a2b4c13dcfc288cd81aa9bf1af4ba88f5e48d9462350d9b3326" exitCode=0 Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.650921 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-68b7-account-create-xpwdw" event={"ID":"e50c9707-1542-4976-ac6b-97e240bc2a47","Type":"ContainerDied","Data":"7bbfa485bb990a2b4c13dcfc288cd81aa9bf1af4ba88f5e48d9462350d9b3326"} Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.655456 4875 generic.go:334] "Generic (PLEG): container finished" podID="169a8fe1-831d-46f5-b939-e1507c89453e" containerID="74829d972ae0ba9f879c590cdb2f186562e49f7a189fcb7398f145b5eb9ec846" exitCode=0 Oct 07 08:12:40 crc kubenswrapper[4875]: I1007 08:12:40.655653 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerDied","Data":"74829d972ae0ba9f879c590cdb2f186562e49f7a189fcb7398f145b5eb9ec846"} Oct 07 08:12:43 crc kubenswrapper[4875]: I1007 08:12:43.418443 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:43 crc kubenswrapper[4875]: I1007 08:12:43.429182 4875 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/97048952-fbe0-46f2-8163-00ec9381508b-etc-swift\") pod \"swift-storage-0\" (UID: \"97048952-fbe0-46f2-8163-00ec9381508b\") " pod="openstack/swift-storage-0" Oct 07 08:12:43 crc kubenswrapper[4875]: I1007 08:12:43.593160 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.627728 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.669964 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.692238 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.741434 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-68b7-account-create-xpwdw" event={"ID":"e50c9707-1542-4976-ac6b-97e240bc2a47","Type":"ContainerDied","Data":"de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772"} Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.741488 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de75217bc58b0c73420a84c186bfa6324211c06f0c7745e200e8904aba304772" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.741450 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68b7-account-create-xpwdw" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.744790 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-npwcn-config-xnf5w" event={"ID":"712d4730-f99a-498a-93a4-e75ec563141a","Type":"ContainerDied","Data":"81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a"} Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.744851 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81ba69e429232ff2735d175ed50ba56553ccad44c9b44e03d6c617fc718f3e0a" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.744815 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-npwcn-config-xnf5w" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.746408 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c6a-account-create-xzc6c" event={"ID":"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c","Type":"ContainerDied","Data":"59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97"} Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.746430 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9c6a-account-create-xzc6c" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.746432 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59bedaad0734c042349c8c649b6beb47f064b3d5d679382a0799beb32c1d4e97" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.776056 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppzf2\" (UniqueName: \"kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2\") pod \"e50c9707-1542-4976-ac6b-97e240bc2a47\" (UID: \"e50c9707-1542-4976-ac6b-97e240bc2a47\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.776104 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-988zl\" (UniqueName: \"kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl\") pod \"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c\" (UID: \"cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.780374 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2" (OuterVolumeSpecName: "kube-api-access-ppzf2") pod "e50c9707-1542-4976-ac6b-97e240bc2a47" (UID: "e50c9707-1542-4976-ac6b-97e240bc2a47"). InnerVolumeSpecName "kube-api-access-ppzf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.781692 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl" (OuterVolumeSpecName: "kube-api-access-988zl") pod "cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" (UID: "cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c"). InnerVolumeSpecName "kube-api-access-988zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878117 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878200 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878235 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878273 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878441 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878449 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run" (OuterVolumeSpecName: "var-run") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878517 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md9mz\" (UniqueName: \"kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878553 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn\") pod \"712d4730-f99a-498a-93a4-e75ec563141a\" (UID: \"712d4730-f99a-498a-93a4-e75ec563141a\") " Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.878683 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879025 4875 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879046 4875 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879063 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppzf2\" (UniqueName: \"kubernetes.io/projected/e50c9707-1542-4976-ac6b-97e240bc2a47-kube-api-access-ppzf2\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879077 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-988zl\" (UniqueName: \"kubernetes.io/projected/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c-kube-api-access-988zl\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879088 4875 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/712d4730-f99a-498a-93a4-e75ec563141a-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879104 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.879353 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts" (OuterVolumeSpecName: "scripts") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.883014 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz" (OuterVolumeSpecName: "kube-api-access-md9mz") pod "712d4730-f99a-498a-93a4-e75ec563141a" (UID: "712d4730-f99a-498a-93a4-e75ec563141a"). InnerVolumeSpecName "kube-api-access-md9mz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.980628 4875 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.980675 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md9mz\" (UniqueName: \"kubernetes.io/projected/712d4730-f99a-498a-93a4-e75ec563141a-kube-api-access-md9mz\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.980690 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/712d4730-f99a-498a-93a4-e75ec563141a-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:46 crc kubenswrapper[4875]: I1007 08:12:46.984088 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.757751 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-frvnr" event={"ID":"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d","Type":"ContainerStarted","Data":"e2d6c24a72bbf10bf096552144f309944909c8b979386f7c99e9298a5e8b470f"} Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.767074 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerStarted","Data":"6f735bc5958aa92a6b5ff7e984af963a2d9883c708b83e365b76df8870a80031"} Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.767908 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.769577 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"cac5a567b6916d862738f5e4af402d3ead614e4ec6c2e815bb7795d6a17d4c93"} Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.771795 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-npwcn-config-xnf5w"] Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.773381 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerStarted","Data":"541c994fd394d6919cd0ba24e91df28102524f45378c6b8fac295753e60b09ba"} Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.773676 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.778920 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-npwcn-config-xnf5w"] Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.787970 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-frvnr" podStartSLOduration=2.989534719 podStartE2EDuration="14.78795193s" podCreationTimestamp="2025-10-07 08:12:33 +0000 UTC" firstStartedPulling="2025-10-07 08:12:34.703203259 +0000 UTC m=+979.662973802" lastFinishedPulling="2025-10-07 08:12:46.50162047 +0000 UTC m=+991.461391013" observedRunningTime="2025-10-07 08:12:47.782042151 +0000 UTC m=+992.741812704" watchObservedRunningTime="2025-10-07 08:12:47.78795193 +0000 UTC m=+992.747722473" Oct 07 08:12:47 crc 
kubenswrapper[4875]: I1007 08:12:47.821946 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371962.032913 podStartE2EDuration="1m14.821861675s" podCreationTimestamp="2025-10-07 08:11:33 +0000 UTC" firstStartedPulling="2025-10-07 08:11:35.393671977 +0000 UTC m=+920.353442530" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:47.815072967 +0000 UTC m=+992.774843530" watchObservedRunningTime="2025-10-07 08:12:47.821861675 +0000 UTC m=+992.781632218" Oct 07 08:12:47 crc kubenswrapper[4875]: I1007 08:12:47.852090 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=44.4555704 podStartE2EDuration="1m14.852063501s" podCreationTimestamp="2025-10-07 08:11:33 +0000 UTC" firstStartedPulling="2025-10-07 08:11:35.959028877 +0000 UTC m=+920.918799420" lastFinishedPulling="2025-10-07 08:12:06.355521978 +0000 UTC m=+951.315292521" observedRunningTime="2025-10-07 08:12:47.84825383 +0000 UTC m=+992.808024383" watchObservedRunningTime="2025-10-07 08:12:47.852063501 +0000 UTC m=+992.811834044" Oct 07 08:12:48 crc kubenswrapper[4875]: I1007 08:12:48.801714 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"83ec57473c66aa9b81cce738892db31a483594744aa280291697309a87873a42"} Oct 07 08:12:49 crc kubenswrapper[4875]: I1007 08:12:49.709042 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="712d4730-f99a-498a-93a4-e75ec563141a" path="/var/lib/kubelet/pods/712d4730-f99a-498a-93a4-e75ec563141a/volumes" Oct 07 08:12:50 crc kubenswrapper[4875]: I1007 08:12:50.820578 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"534bc2e32c4d12a95d9641f0d003a6e79a8ce8e45b220f21bebd7f6a0bda359d"} Oct 07 08:12:50 crc kubenswrapper[4875]: I1007 08:12:50.820997 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"07f5a6024ba529fee00e7cd4594dbfced96a95abb431b7281aba16c4ca850416"} Oct 07 08:12:50 crc kubenswrapper[4875]: I1007 08:12:50.821017 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"16ecba98a54c179b5076a82f91ad0a53bd86b9e806c8e6eaab8472d16a719dd4"} Oct 07 08:12:52 crc kubenswrapper[4875]: I1007 08:12:52.841624 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"2ee4f39b8024198f1efa47337f4a820704a9fa10269d78f9b6fe63b927a89719"} Oct 07 08:12:52 crc kubenswrapper[4875]: I1007 08:12:52.842283 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"441f0acb8d349feeaf01d1a74567727841bc3523bba0d2603910eb332185c743"} Oct 07 08:12:52 crc kubenswrapper[4875]: I1007 08:12:52.842337 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"c990d5ea1cd625a269a7f0b71efab340746a0b160a7f86b8dcca0bad9fbd63de"} Oct 07 08:12:52 crc kubenswrapper[4875]: I1007 08:12:52.842351 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"73948eb54bfe9633be7fa29e35c55ce308d81c56c665d9b5165e8fcd2429b985"} Oct 07 08:12:53 crc kubenswrapper[4875]: I1007 08:12:53.853457 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"fa079eda7fe8943a7b3fe6a9a87d9803ffc6527904cf49d8e679244bed234bfa"} Oct 07 08:12:53 crc kubenswrapper[4875]: I1007 08:12:53.853809 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"c65cf546970f8416d95a347f50392db4b7bd9c628aba471bb5d42ec6cd989f71"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.866775 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"b40ac48a13ae993fa854c9df451bda0c183926c52ca8ae8e7e395c538e678265"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.867172 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"500dd23f8b965159e27ce7f761c0c8813df00d0c0e5b926f3b134c183c74b73d"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.867186 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"b0500e2856df824bb5a3d6fa814c733a3438d7d0300d832c388dbf4404caf389"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.867196 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"9bd88d0d96a3ac82872ad66dbae699fb1880fe8080aeb38ad7ca4e33ea3f3b74"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.867206 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"97048952-fbe0-46f2-8163-00ec9381508b","Type":"ContainerStarted","Data":"63f9bf1f1718952b099d034925752a8450aabfb0af7b635a61d8325b08abcf4c"} Oct 07 08:12:54 crc kubenswrapper[4875]: I1007 08:12:54.903616 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.531216673 podStartE2EDuration="44.903601169s" podCreationTimestamp="2025-10-07 08:12:10 +0000 UTC" firstStartedPulling="2025-10-07 08:12:46.991128983 +0000 UTC m=+991.950899526" lastFinishedPulling="2025-10-07 08:12:53.363513439 +0000 UTC m=+998.323284022" observedRunningTime="2025-10-07 08:12:54.895290623 +0000 UTC m=+999.855061196" watchObservedRunningTime="2025-10-07 08:12:54.903601169 +0000 UTC m=+999.863371722" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.169866 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:12:55 crc kubenswrapper[4875]: E1007 08:12:55.170424 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50c9707-1542-4976-ac6b-97e240bc2a47" containerName="mariadb-account-create" 
Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170445 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50c9707-1542-4976-ac6b-97e240bc2a47" containerName="mariadb-account-create" Oct 07 08:12:55 crc kubenswrapper[4875]: E1007 08:12:55.170473 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" containerName="mariadb-account-create" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170480 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" containerName="mariadb-account-create" Oct 07 08:12:55 crc kubenswrapper[4875]: E1007 08:12:55.170512 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="712d4730-f99a-498a-93a4-e75ec563141a" containerName="ovn-config" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170518 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="712d4730-f99a-498a-93a4-e75ec563141a" containerName="ovn-config" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170687 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="e50c9707-1542-4976-ac6b-97e240bc2a47" containerName="mariadb-account-create" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170727 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" containerName="mariadb-account-create" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.170750 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="712d4730-f99a-498a-93a4-e75ec563141a" containerName="ovn-config" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.171763 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.174531 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.192219 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.342927 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.343132 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.343173 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhwg6\" (UniqueName: \"kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.343340 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.343544 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.343940 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.445051 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.445101 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhwg6\" (UniqueName: \"kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.445146 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.446164 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.446100 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.446103 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.446274 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.446525 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.447049 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.447343 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.447772 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.467108 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhwg6\" (UniqueName: \"kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6\") pod \"dnsmasq-dns-5c79d794d7-rt5hz\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.564692 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.880945 4875 generic.go:334] "Generic (PLEG): container finished" podID="831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" containerID="e2d6c24a72bbf10bf096552144f309944909c8b979386f7c99e9298a5e8b470f" exitCode=0 Oct 07 08:12:55 crc kubenswrapper[4875]: I1007 08:12:55.884595 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-frvnr" event={"ID":"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d","Type":"ContainerDied","Data":"e2d6c24a72bbf10bf096552144f309944909c8b979386f7c99e9298a5e8b470f"} Oct 07 08:12:56 crc kubenswrapper[4875]: I1007 08:12:56.095965 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:12:56 crc kubenswrapper[4875]: I1007 08:12:56.889044 4875 generic.go:334] "Generic (PLEG): container finished" podID="dff39060-d536-46ab-8e45-b946b010bd5e" containerID="ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1" exitCode=0 Oct 07 08:12:56 crc kubenswrapper[4875]: I1007 08:12:56.889089 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" event={"ID":"dff39060-d536-46ab-8e45-b946b010bd5e","Type":"ContainerDied","Data":"ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1"} Oct 07 08:12:56 crc kubenswrapper[4875]: I1007 08:12:56.889430 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" event={"ID":"dff39060-d536-46ab-8e45-b946b010bd5e","Type":"ContainerStarted","Data":"c84df084d615af5d19624114a716744beab8e517a411eafd6f95ed7e0ab99da6"} Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.290646 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.385084 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data\") pod \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.385645 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data\") pod \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.385678 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle\") pod \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.386364 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmbcq\" (UniqueName: \"kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq\") pod \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\" (UID: \"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d\") " Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.392423 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" (UID: "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.393096 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq" (OuterVolumeSpecName: "kube-api-access-xmbcq") pod "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" (UID: "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d"). InnerVolumeSpecName "kube-api-access-xmbcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.432482 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" (UID: "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.434774 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data" (OuterVolumeSpecName: "config-data") pod "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" (UID: "831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.488957 4875 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.488997 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.489010 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmbcq\" (UniqueName: \"kubernetes.io/projected/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-kube-api-access-xmbcq\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.489020 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.899965 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" event={"ID":"dff39060-d536-46ab-8e45-b946b010bd5e","Type":"ContainerStarted","Data":"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541"} Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.900116 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.901649 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-frvnr" event={"ID":"831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d","Type":"ContainerDied","Data":"1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb"} Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.901677 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ad8a540e92f2a5c33f67c99ca8590f093b1d9cfd1a4726068e5dc07bc7fdffb" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.901721 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-frvnr" Oct 07 08:12:57 crc kubenswrapper[4875]: I1007 08:12:57.918814 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" podStartSLOduration=2.918798741 podStartE2EDuration="2.918798741s" podCreationTimestamp="2025-10-07 08:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:12:57.917187579 +0000 UTC m=+1002.876958142" watchObservedRunningTime="2025-10-07 08:12:57.918798741 +0000 UTC m=+1002.878569284" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.359779 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.410203 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:12:58 crc kubenswrapper[4875]: E1007 08:12:58.410869 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" containerName="glance-db-sync" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.410908 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" containerName="glance-db-sync" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.411142 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" containerName="glance-db-sync" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.412181 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.436755 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.503851 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.503939 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.503977 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.504473 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc 
kubenswrapper[4875]: I1007 08:12:58.504530 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.504552 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j64v\" (UniqueName: \"kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.605904 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.605953 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j64v\" (UniqueName: \"kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.606022 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.606055 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.606090 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.606143 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.607053 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.607084 
4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.607226 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.607459 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.607685 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.629032 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j64v\" (UniqueName: \"kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v\") pod \"dnsmasq-dns-5f59b8f679-pgzlv\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:58 crc kubenswrapper[4875]: I1007 08:12:58.728213 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:12:59 crc kubenswrapper[4875]: I1007 08:12:59.264660 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:12:59 crc kubenswrapper[4875]: I1007 08:12:59.956741 4875 generic.go:334] "Generic (PLEG): container finished" podID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerID="8a70a5f940493356b4e0cf1ffa90eb667b5433a3f21aa152a172e0c34fea6bc5" exitCode=0 Oct 07 08:12:59 crc kubenswrapper[4875]: I1007 08:12:59.957490 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="dnsmasq-dns" containerID="cri-o://5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541" gracePeriod=10 Oct 07 08:12:59 crc kubenswrapper[4875]: I1007 08:12:59.958566 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" event={"ID":"a3eb6579-5ad7-4817-8937-c9a6ca868920","Type":"ContainerDied","Data":"8a70a5f940493356b4e0cf1ffa90eb667b5433a3f21aa152a172e0c34fea6bc5"} Oct 07 08:12:59 crc kubenswrapper[4875]: I1007 08:12:59.958601 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" event={"ID":"a3eb6579-5ad7-4817-8937-c9a6ca868920","Type":"ContainerStarted","Data":"7de65ece51d1df4fb4de156bfb73119b027598988dd59a56bd3836ea30e8f91c"} Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.317133 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344141 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344187 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344251 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344370 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhwg6\" (UniqueName: \"kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344476 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.344671 4875 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb\") pod \"dff39060-d536-46ab-8e45-b946b010bd5e\" (UID: \"dff39060-d536-46ab-8e45-b946b010bd5e\") " Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.355176 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6" (OuterVolumeSpecName: "kube-api-access-bhwg6") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "kube-api-access-bhwg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.409546 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.413481 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.417302 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config" (OuterVolumeSpecName: "config") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.425008 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.439512 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dff39060-d536-46ab-8e45-b946b010bd5e" (UID: "dff39060-d536-46ab-8e45-b946b010bd5e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446622 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446662 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446678 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446691 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446704 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dff39060-d536-46ab-8e45-b946b010bd5e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.446719 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhwg6\" (UniqueName: \"kubernetes.io/projected/dff39060-d536-46ab-8e45-b946b010bd5e-kube-api-access-bhwg6\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.971156 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" event={"ID":"a3eb6579-5ad7-4817-8937-c9a6ca868920","Type":"ContainerStarted","Data":"177b7130f64a1946d1b93f049b45ee9a2bb403c591d4a0c0e34b481470431644"} Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.974538 4875 generic.go:334] "Generic (PLEG): container finished" podID="dff39060-d536-46ab-8e45-b946b010bd5e" containerID="5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541" exitCode=0 Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.974601 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.974666 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" event={"ID":"dff39060-d536-46ab-8e45-b946b010bd5e","Type":"ContainerDied","Data":"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541"} Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.974758 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-rt5hz" event={"ID":"dff39060-d536-46ab-8e45-b946b010bd5e","Type":"ContainerDied","Data":"c84df084d615af5d19624114a716744beab8e517a411eafd6f95ed7e0ab99da6"} Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.974802 4875 scope.go:117] "RemoveContainer" containerID="5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541" Oct 07 08:13:00 crc kubenswrapper[4875]: I1007 08:13:00.996340 4875 scope.go:117] "RemoveContainer" containerID="ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.007186 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" podStartSLOduration=3.007168234 podStartE2EDuration="3.007168234s" podCreationTimestamp="2025-10-07 08:12:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:01.003313211 +0000 UTC m=+1005.963083834" watchObservedRunningTime="2025-10-07 08:13:01.007168234 +0000 UTC m=+1005.966938787" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.035390 4875 scope.go:117] "RemoveContainer" containerID="5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541" Oct 07 08:13:01 crc kubenswrapper[4875]: E1007 08:13:01.036061 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541\": container with ID starting with 5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541 not found: ID does not exist" containerID="5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.036122 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541"} err="failed to get container status \"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541\": rpc error: code = NotFound desc = could not find container \"5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541\": container with ID starting with 5b49dcb0879ae476637f65fa0f72262323f25354ad0a00fb3f75d00e9f9c6541 not found: ID does not exist" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.036155 4875 scope.go:117] "RemoveContainer" containerID="ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1" Oct 07 08:13:01 crc kubenswrapper[4875]: E1007 08:13:01.036727 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1\": container with ID starting with ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1 not found: ID does not exist" containerID="ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 
08:13:01.037311 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1"} err="failed to get container status \"ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1\": rpc error: code = NotFound desc = could not find container \"ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1\": container with ID starting with ffdf629562c5011670071ae7b661148cd19d922420bd6a0bfda68c6b03751ee1 not found: ID does not exist" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.042692 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.050346 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-rt5hz"] Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.711681 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" path="/var/lib/kubelet/pods/dff39060-d536-46ab-8e45-b946b010bd5e/volumes" Oct 07 08:13:01 crc kubenswrapper[4875]: I1007 08:13:01.986775 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:13:04 crc kubenswrapper[4875]: I1007 08:13:04.914070 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:13:05 crc kubenswrapper[4875]: I1007 08:13:05.305188 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.629749 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-ngck8"] Oct 07 08:13:06 crc kubenswrapper[4875]: E1007 08:13:06.630655 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="init" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.630673 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="init" Oct 07 08:13:06 crc kubenswrapper[4875]: E1007 08:13:06.630698 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="dnsmasq-dns" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.630706 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="dnsmasq-dns" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.630971 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="dff39060-d536-46ab-8e45-b946b010bd5e" containerName="dnsmasq-dns" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.631680 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.650437 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ngck8"] Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.672488 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w8kz\" (UniqueName: \"kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz\") pod \"barbican-db-create-ngck8\" (UID: \"da04c041-724c-4f63-a7da-c69cbf663805\") " pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.774515 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w8kz\" (UniqueName: \"kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz\") pod \"barbican-db-create-ngck8\" (UID: \"da04c041-724c-4f63-a7da-c69cbf663805\") " pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.794957 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w8kz\" (UniqueName: \"kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz\") pod \"barbican-db-create-ngck8\" (UID: \"da04c041-724c-4f63-a7da-c69cbf663805\") " pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.831303 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-rf76c"] Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.832497 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.848991 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rf76c"] Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.883867 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-897ck\" (UniqueName: \"kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck\") pod \"cinder-db-create-rf76c\" (UID: \"5b44db21-b46a-4439-9bca-c118bf324259\") " pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.933276 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qrfpg"] Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.934663 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.954730 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.957928 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qrfpg"] Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.985620 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-897ck\" (UniqueName: \"kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck\") pod \"cinder-db-create-rf76c\" (UID: \"5b44db21-b46a-4439-9bca-c118bf324259\") " pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:06 crc kubenswrapper[4875]: I1007 08:13:06.985668 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqfgg\" (UniqueName: \"kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg\") pod \"neutron-db-create-qrfpg\" (UID: \"a93ef883-6507-4397-bc94-b1979e4763b1\") " pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.007054 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-897ck\" (UniqueName: \"kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck\") pod \"cinder-db-create-rf76c\" (UID: \"5b44db21-b46a-4439-9bca-c118bf324259\") " pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.086838 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqfgg\" (UniqueName: \"kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg\") pod \"neutron-db-create-qrfpg\" (UID: \"a93ef883-6507-4397-bc94-b1979e4763b1\") " pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.106771 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-jngqj"] Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.110672 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.116515 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.117201 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.117366 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5bgfb" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.117415 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jngqj"] Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.117513 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.121943 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqfgg\" (UniqueName: \"kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg\") pod \"neutron-db-create-qrfpg\" (UID: \"a93ef883-6507-4397-bc94-b1979e4763b1\") " pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.170172 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.195430 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.195504 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.195704 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w27x\" (UniqueName: \"kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.251753 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.296973 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w27x\" (UniqueName: \"kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.297436 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.297473 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.303728 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.305517 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.313727 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w27x\" (UniqueName: 
\"kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x\") pod \"keystone-db-sync-jngqj\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.470564 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.494541 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ngck8"] Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.641436 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rf76c"] Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.778757 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qrfpg"] Oct 07 08:13:07 crc kubenswrapper[4875]: W1007 08:13:07.794433 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda93ef883_6507_4397_bc94_b1979e4763b1.slice/crio-bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f WatchSource:0}: Error finding container bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f: Status 404 returned error can't find the container with id bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f Oct 07 08:13:07 crc kubenswrapper[4875]: I1007 08:13:07.821669 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jngqj"] Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.044090 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jngqj" event={"ID":"c5c73783-793b-408c-bef0-105a39960aa2","Type":"ContainerStarted","Data":"e7b9b2cfc32fa3852bda22cb88711065088578e5b75603b5dcc05368c5d5d36b"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.046298 4875 generic.go:334] "Generic (PLEG): container finished" podID="da04c041-724c-4f63-a7da-c69cbf663805" containerID="5bb5319e164fddf9e9216540479deb2613a098d73ececb4120869f45667e0ac0" exitCode=0 Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.046400 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ngck8" event={"ID":"da04c041-724c-4f63-a7da-c69cbf663805","Type":"ContainerDied","Data":"5bb5319e164fddf9e9216540479deb2613a098d73ececb4120869f45667e0ac0"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.046434 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ngck8" event={"ID":"da04c041-724c-4f63-a7da-c69cbf663805","Type":"ContainerStarted","Data":"3a86b62ef41d9b9e0b192beffaa74d908477fdee8730d58643ae32237eb959e0"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.049057 4875 generic.go:334] "Generic (PLEG): container finished" podID="5b44db21-b46a-4439-9bca-c118bf324259" containerID="14fc663b076f60e9ee988ab1ae4dbbf94f78059f1b1a8cf7acb9531ed7a1a634" exitCode=0 Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.049134 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rf76c" event={"ID":"5b44db21-b46a-4439-9bca-c118bf324259","Type":"ContainerDied","Data":"14fc663b076f60e9ee988ab1ae4dbbf94f78059f1b1a8cf7acb9531ed7a1a634"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.049167 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rf76c" 
event={"ID":"5b44db21-b46a-4439-9bca-c118bf324259","Type":"ContainerStarted","Data":"9ccf18e5c3f93b3f554473b80072693c92cbcdee59e74422dec721725dcf7161"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.050694 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qrfpg" event={"ID":"a93ef883-6507-4397-bc94-b1979e4763b1","Type":"ContainerStarted","Data":"bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f"} Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.730194 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.788552 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:13:08 crc kubenswrapper[4875]: I1007 08:13:08.788821 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="dnsmasq-dns" containerID="cri-o://f4248a5ea8143bd48f696e198b89334c6dabfeff3ffc52b58b6bb604fc388db1" gracePeriod=10 Oct 07 08:13:09 crc kubenswrapper[4875]: I1007 08:13:09.074346 4875 generic.go:334] "Generic (PLEG): container finished" podID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerID="f4248a5ea8143bd48f696e198b89334c6dabfeff3ffc52b58b6bb604fc388db1" exitCode=0 Oct 07 08:13:09 crc kubenswrapper[4875]: I1007 08:13:09.074457 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" event={"ID":"16b4028b-5a2d-4e4e-85f7-a8af7e041875","Type":"ContainerDied","Data":"f4248a5ea8143bd48f696e198b89334c6dabfeff3ffc52b58b6bb604fc388db1"} Oct 07 08:13:09 crc kubenswrapper[4875]: I1007 08:13:09.076735 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qrfpg" event={"ID":"a93ef883-6507-4397-bc94-b1979e4763b1","Type":"ContainerDied","Data":"317e9e19e5736dc5700c8cacdf2b3086fde9220f18b8c75875f2aba331e5b1b7"} Oct 07 08:13:09 crc kubenswrapper[4875]: I1007 08:13:09.076726 4875 generic.go:334] "Generic (PLEG): container finished" podID="a93ef883-6507-4397-bc94-b1979e4763b1" containerID="317e9e19e5736dc5700c8cacdf2b3086fde9220f18b8c75875f2aba331e5b1b7" exitCode=0 Oct 07 08:13:09 crc kubenswrapper[4875]: I1007 08:13:09.289560 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.442670 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config\") pod \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.442764 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc\") pod \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.442821 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb\") pod \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.448992 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzggs\" (UniqueName: \"kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs\") pod \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.449082 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb\") pod \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\" (UID: \"16b4028b-5a2d-4e4e-85f7-a8af7e041875\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.465515 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs" (OuterVolumeSpecName: "kube-api-access-hzggs") pod "16b4028b-5a2d-4e4e-85f7-a8af7e041875" (UID: "16b4028b-5a2d-4e4e-85f7-a8af7e041875"). InnerVolumeSpecName "kube-api-access-hzggs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.496239 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "16b4028b-5a2d-4e4e-85f7-a8af7e041875" (UID: "16b4028b-5a2d-4e4e-85f7-a8af7e041875"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.499489 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config" (OuterVolumeSpecName: "config") pod "16b4028b-5a2d-4e4e-85f7-a8af7e041875" (UID: "16b4028b-5a2d-4e4e-85f7-a8af7e041875"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.501983 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.503671 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "16b4028b-5a2d-4e4e-85f7-a8af7e041875" (UID: "16b4028b-5a2d-4e4e-85f7-a8af7e041875"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.507851 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "16b4028b-5a2d-4e4e-85f7-a8af7e041875" (UID: "16b4028b-5a2d-4e4e-85f7-a8af7e041875"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.509148 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.550768 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-897ck\" (UniqueName: \"kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck\") pod \"5b44db21-b46a-4439-9bca-c118bf324259\" (UID: \"5b44db21-b46a-4439-9bca-c118bf324259\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.550974 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w8kz\" (UniqueName: \"kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz\") pod \"da04c041-724c-4f63-a7da-c69cbf663805\" (UID: \"da04c041-724c-4f63-a7da-c69cbf663805\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.551358 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.551371 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzggs\" (UniqueName: \"kubernetes.io/projected/16b4028b-5a2d-4e4e-85f7-a8af7e041875-kube-api-access-hzggs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.551383 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.551392 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.551400 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16b4028b-5a2d-4e4e-85f7-a8af7e041875-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.557189 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz" (OuterVolumeSpecName: "kube-api-access-4w8kz") pod "da04c041-724c-4f63-a7da-c69cbf663805" (UID: "da04c041-724c-4f63-a7da-c69cbf663805"). 
InnerVolumeSpecName "kube-api-access-4w8kz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.559910 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck" (OuterVolumeSpecName: "kube-api-access-897ck") pod "5b44db21-b46a-4439-9bca-c118bf324259" (UID: "5b44db21-b46a-4439-9bca-c118bf324259"). InnerVolumeSpecName "kube-api-access-897ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.653468 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-897ck\" (UniqueName: \"kubernetes.io/projected/5b44db21-b46a-4439-9bca-c118bf324259-kube-api-access-897ck\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:09.653507 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w8kz\" (UniqueName: \"kubernetes.io/projected/da04c041-724c-4f63-a7da-c69cbf663805-kube-api-access-4w8kz\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.094689 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ngck8" event={"ID":"da04c041-724c-4f63-a7da-c69cbf663805","Type":"ContainerDied","Data":"3a86b62ef41d9b9e0b192beffaa74d908477fdee8730d58643ae32237eb959e0"} Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.094768 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a86b62ef41d9b9e0b192beffaa74d908477fdee8730d58643ae32237eb959e0" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.094706 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ngck8" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.100532 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" event={"ID":"16b4028b-5a2d-4e4e-85f7-a8af7e041875","Type":"ContainerDied","Data":"e287334436cc740b51ba66a170d4644a7637b227db25cd79f5af94254c57ca26"} Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.100593 4875 scope.go:117] "RemoveContainer" containerID="f4248a5ea8143bd48f696e198b89334c6dabfeff3ffc52b58b6bb604fc388db1" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.100756 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-crjg7" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.108224 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rf76c" event={"ID":"5b44db21-b46a-4439-9bca-c118bf324259","Type":"ContainerDied","Data":"9ccf18e5c3f93b3f554473b80072693c92cbcdee59e74422dec721725dcf7161"} Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.108252 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rf76c" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.108263 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ccf18e5c3f93b3f554473b80072693c92cbcdee59e74422dec721725dcf7161" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.135741 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.141662 4875 scope.go:117] "RemoveContainer" containerID="53acb5efdebdce4507eddff69515a368553ea6632204aebc6a8993b4d20fe71a" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.145093 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-crjg7"] Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.464985 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.566950 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqfgg\" (UniqueName: \"kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg\") pod \"a93ef883-6507-4397-bc94-b1979e4763b1\" (UID: \"a93ef883-6507-4397-bc94-b1979e4763b1\") " Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.572608 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg" (OuterVolumeSpecName: "kube-api-access-rqfgg") pod "a93ef883-6507-4397-bc94-b1979e4763b1" (UID: "a93ef883-6507-4397-bc94-b1979e4763b1"). InnerVolumeSpecName "kube-api-access-rqfgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:10 crc kubenswrapper[4875]: I1007 08:13:10.669263 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqfgg\" (UniqueName: \"kubernetes.io/projected/a93ef883-6507-4397-bc94-b1979e4763b1-kube-api-access-rqfgg\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:11 crc kubenswrapper[4875]: I1007 08:13:11.120137 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qrfpg" event={"ID":"a93ef883-6507-4397-bc94-b1979e4763b1","Type":"ContainerDied","Data":"bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f"} Oct 07 08:13:11 crc kubenswrapper[4875]: I1007 08:13:11.120598 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qrfpg" Oct 07 08:13:11 crc kubenswrapper[4875]: I1007 08:13:11.120611 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcd2dd98249c7090f360dd8b877446bd8afeef1611a5153d6603e11e95263e0f" Oct 07 08:13:11 crc kubenswrapper[4875]: I1007 08:13:11.711360 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" path="/var/lib/kubelet/pods/16b4028b-5a2d-4e4e-85f7-a8af7e041875/volumes" Oct 07 08:13:14 crc kubenswrapper[4875]: I1007 08:13:14.148384 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jngqj" event={"ID":"c5c73783-793b-408c-bef0-105a39960aa2","Type":"ContainerStarted","Data":"9cf927b891434ca2be290755f3dfd77585171ed34edd68fc390897211e4dd3b8"} Oct 07 08:13:14 crc kubenswrapper[4875]: I1007 08:13:14.169561 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-jngqj" podStartSLOduration=1.9072936569999999 podStartE2EDuration="7.16953691s" podCreationTimestamp="2025-10-07 08:13:07 +0000 UTC" firstStartedPulling="2025-10-07 08:13:07.836273435 +0000 UTC m=+1012.796043978" lastFinishedPulling="2025-10-07 08:13:13.098516688 +0000 UTC m=+1018.058287231" observedRunningTime="2025-10-07 08:13:14.168383132 +0000 UTC m=+1019.128153725" watchObservedRunningTime="2025-10-07 08:13:14.16953691 +0000 UTC m=+1019.129307463" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.800660 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-f53c-account-create-gm78b"] Oct 07 08:13:16 crc kubenswrapper[4875]: E1007 08:13:16.801969 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="init" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802002 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="init" Oct 07 08:13:16 crc kubenswrapper[4875]: E1007 08:13:16.802048 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a93ef883-6507-4397-bc94-b1979e4763b1" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802070 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="a93ef883-6507-4397-bc94-b1979e4763b1" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: E1007 08:13:16.802116 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b44db21-b46a-4439-9bca-c118bf324259" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802133 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b44db21-b46a-4439-9bca-c118bf324259" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: E1007 08:13:16.802174 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da04c041-724c-4f63-a7da-c69cbf663805" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802192 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="da04c041-724c-4f63-a7da-c69cbf663805" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: E1007 08:13:16.802223 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="dnsmasq-dns" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802239 4875 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="dnsmasq-dns" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802668 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="da04c041-724c-4f63-a7da-c69cbf663805" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802724 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b4028b-5a2d-4e4e-85f7-a8af7e041875" containerName="dnsmasq-dns" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802762 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b44db21-b46a-4439-9bca-c118bf324259" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.802787 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="a93ef883-6507-4397-bc94-b1979e4763b1" containerName="mariadb-database-create" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.804162 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.806450 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.813544 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f53c-account-create-gm78b"] Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.880852 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-fcfd-account-create-l824k"] Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.882436 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.885226 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.892252 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fcfd-account-create-l824k"] Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.900205 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg2v9\" (UniqueName: \"kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9\") pod \"cinder-fcfd-account-create-l824k\" (UID: \"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d\") " pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:16 crc kubenswrapper[4875]: I1007 08:13:16.900293 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scc9s\" (UniqueName: \"kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s\") pod \"barbican-f53c-account-create-gm78b\" (UID: \"be2a7797-874d-438b-a3a6-8b19d97011df\") " pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.002225 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg2v9\" (UniqueName: \"kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9\") pod \"cinder-fcfd-account-create-l824k\" (UID: \"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d\") " pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.002335 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-scc9s\" (UniqueName: \"kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s\") pod \"barbican-f53c-account-create-gm78b\" (UID: \"be2a7797-874d-438b-a3a6-8b19d97011df\") " pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.024560 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scc9s\" (UniqueName: \"kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s\") pod \"barbican-f53c-account-create-gm78b\" (UID: \"be2a7797-874d-438b-a3a6-8b19d97011df\") " pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.025756 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg2v9\" (UniqueName: \"kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9\") pod \"cinder-fcfd-account-create-l824k\" (UID: \"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d\") " pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.125223 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.188133 4875 generic.go:334] "Generic (PLEG): container finished" podID="c5c73783-793b-408c-bef0-105a39960aa2" containerID="9cf927b891434ca2be290755f3dfd77585171ed34edd68fc390897211e4dd3b8" exitCode=0 Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.188251 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jngqj" event={"ID":"c5c73783-793b-408c-bef0-105a39960aa2","Type":"ContainerDied","Data":"9cf927b891434ca2be290755f3dfd77585171ed34edd68fc390897211e4dd3b8"} Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.194382 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b4f5-account-create-npspj"] Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.196026 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.199314 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.200062 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.204156 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b4f5-account-create-npspj"] Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.309843 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqbjw\" (UniqueName: \"kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw\") pod \"neutron-b4f5-account-create-npspj\" (UID: \"03f50e66-b42a-4cf1-ba35-47b295394cab\") " pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.412749 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqbjw\" (UniqueName: \"kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw\") pod \"neutron-b4f5-account-create-npspj\" (UID: \"03f50e66-b42a-4cf1-ba35-47b295394cab\") " pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.442274 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqbjw\" (UniqueName: \"kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw\") pod \"neutron-b4f5-account-create-npspj\" (UID: \"03f50e66-b42a-4cf1-ba35-47b295394cab\") " pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.607030 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.664731 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f53c-account-create-gm78b"] Oct 07 08:13:17 crc kubenswrapper[4875]: W1007 08:13:17.671258 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe2a7797_874d_438b_a3a6_8b19d97011df.slice/crio-163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1 WatchSource:0}: Error finding container 163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1: Status 404 returned error can't find the container with id 163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1 Oct 07 08:13:17 crc kubenswrapper[4875]: I1007 08:13:17.780270 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fcfd-account-create-l824k"] Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.081781 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b4f5-account-create-npspj"] Oct 07 08:13:18 crc kubenswrapper[4875]: W1007 08:13:18.082560 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03f50e66_b42a_4cf1_ba35_47b295394cab.slice/crio-e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677 WatchSource:0}: Error finding container e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677: Status 404 returned error can't find the container with id e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677 Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.199161 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b4f5-account-create-npspj" 
event={"ID":"03f50e66-b42a-4cf1-ba35-47b295394cab","Type":"ContainerStarted","Data":"e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677"} Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.202780 4875 generic.go:334] "Generic (PLEG): container finished" podID="be2a7797-874d-438b-a3a6-8b19d97011df" containerID="e5ef0bbd20b650137488c0f519861274e4e023fa69487eb52d876f77086969b5" exitCode=0 Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.202854 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f53c-account-create-gm78b" event={"ID":"be2a7797-874d-438b-a3a6-8b19d97011df","Type":"ContainerDied","Data":"e5ef0bbd20b650137488c0f519861274e4e023fa69487eb52d876f77086969b5"} Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.202895 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f53c-account-create-gm78b" event={"ID":"be2a7797-874d-438b-a3a6-8b19d97011df","Type":"ContainerStarted","Data":"163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1"} Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.205505 4875 generic.go:334] "Generic (PLEG): container finished" podID="f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" containerID="ee50df3801964d8bb51d2bf86612cbd96a8d0bd531bb8175b4aa379a1ff6ab62" exitCode=0 Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.205776 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fcfd-account-create-l824k" event={"ID":"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d","Type":"ContainerDied","Data":"ee50df3801964d8bb51d2bf86612cbd96a8d0bd531bb8175b4aa379a1ff6ab62"} Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.205841 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fcfd-account-create-l824k" event={"ID":"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d","Type":"ContainerStarted","Data":"020f0cea24db571348d3f876fcd172d925c384caf642d3b26a00b45d66da5e36"} Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.568492 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.644144 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle\") pod \"c5c73783-793b-408c-bef0-105a39960aa2\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.644238 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w27x\" (UniqueName: \"kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x\") pod \"c5c73783-793b-408c-bef0-105a39960aa2\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.644289 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data\") pod \"c5c73783-793b-408c-bef0-105a39960aa2\" (UID: \"c5c73783-793b-408c-bef0-105a39960aa2\") " Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.653076 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x" (OuterVolumeSpecName: "kube-api-access-5w27x") pod "c5c73783-793b-408c-bef0-105a39960aa2" (UID: "c5c73783-793b-408c-bef0-105a39960aa2"). InnerVolumeSpecName "kube-api-access-5w27x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.677828 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5c73783-793b-408c-bef0-105a39960aa2" (UID: "c5c73783-793b-408c-bef0-105a39960aa2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.709088 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data" (OuterVolumeSpecName: "config-data") pod "c5c73783-793b-408c-bef0-105a39960aa2" (UID: "c5c73783-793b-408c-bef0-105a39960aa2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.746621 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.746647 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w27x\" (UniqueName: \"kubernetes.io/projected/c5c73783-793b-408c-bef0-105a39960aa2-kube-api-access-5w27x\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:18 crc kubenswrapper[4875]: I1007 08:13:18.746660 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c73783-793b-408c-bef0-105a39960aa2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.222491 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-jngqj" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.222491 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jngqj" event={"ID":"c5c73783-793b-408c-bef0-105a39960aa2","Type":"ContainerDied","Data":"e7b9b2cfc32fa3852bda22cb88711065088578e5b75603b5dcc05368c5d5d36b"} Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.223809 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7b9b2cfc32fa3852bda22cb88711065088578e5b75603b5dcc05368c5d5d36b" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.226644 4875 generic.go:334] "Generic (PLEG): container finished" podID="03f50e66-b42a-4cf1-ba35-47b295394cab" containerID="b149f9e615fdf134670eb2d542b7a2c6abe01b98816fa89f0cd6473908b8abda" exitCode=0 Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.226768 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b4f5-account-create-npspj" event={"ID":"03f50e66-b42a-4cf1-ba35-47b295394cab","Type":"ContainerDied","Data":"b149f9e615fdf134670eb2d542b7a2c6abe01b98816fa89f0cd6473908b8abda"} Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.470570 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:19 crc kubenswrapper[4875]: E1007 08:13:19.471146 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c73783-793b-408c-bef0-105a39960aa2" containerName="keystone-db-sync" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.471171 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c73783-793b-408c-bef0-105a39960aa2" containerName="keystone-db-sync" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.471405 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c73783-793b-408c-bef0-105a39960aa2" containerName="keystone-db-sync" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.473028 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.484954 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.520950 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bkg7r"] Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.523281 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.538323 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.538595 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.538721 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.538912 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5bgfb" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.550813 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bkg7r"] Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563437 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563476 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563524 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563548 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563569 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563596 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563618 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9czg4\" (UniqueName: 
\"kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563646 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563670 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563686 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhdrf\" (UniqueName: \"kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563714 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.563745 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.591198 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673266 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg2v9\" (UniqueName: \"kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9\") pod \"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d\" (UID: \"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d\") " Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673540 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673566 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673646 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673671 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9czg4\" (UniqueName: \"kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673702 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673726 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673744 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhdrf\" (UniqueName: \"kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673775 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 
08:13:19.673812 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673867 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673902 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.673938 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.683668 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.691640 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.691930 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.692402 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.692769 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.707141 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.707451 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.711188 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9" (OuterVolumeSpecName: "kube-api-access-bg2v9") pod "f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" (UID: "f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d"). InnerVolumeSpecName "kube-api-access-bg2v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.714569 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.727222 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.743932 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.765297 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:19 crc kubenswrapper[4875]: E1007 08:13:19.765662 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" containerName="mariadb-account-create" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.765678 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" containerName="mariadb-account-create" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.765940 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" containerName="mariadb-account-create" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.766990 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.773460 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.773639 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9czg4\" (UniqueName: \"kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4\") pod \"dnsmasq-dns-bbf5cc879-l454k\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.777563 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhdrf\" (UniqueName: \"kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf\") pod \"keystone-bootstrap-bkg7r\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.778159 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg2v9\" (UniqueName: \"kubernetes.io/projected/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d-kube-api-access-bg2v9\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.787322 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.787538 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.813277 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-wms7m" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.832958 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.887898 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.910030 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.911372 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bgn5\" (UniqueName: \"kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.911564 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.911666 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.911759 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:19 crc kubenswrapper[4875]: I1007 08:13:19.911791 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.014214 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.014284 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.014377 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bgn5\" (UniqueName: \"kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.014446 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 
crc kubenswrapper[4875]: I1007 08:13:20.014481 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.015532 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.016471 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.019198 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.020576 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.024194 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.047512 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bgn5\" (UniqueName: \"kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5\") pod \"horizon-6dd767d9c-bdd6z\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.145650 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-s9cbr"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.146888 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.154143 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.154759 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.157792 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.158296 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.163131 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mz6wn" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.163270 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.210494 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220510 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220581 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220652 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220711 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220748 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220788 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220813 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j6pl\" (UniqueName: \"kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.220852 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.235651 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.235732 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpbk5\" (UniqueName: \"kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.265317 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-s9cbr"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.307434 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.339308 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scc9s\" (UniqueName: \"kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s\") pod \"be2a7797-874d-438b-a3a6-8b19d97011df\" (UID: \"be2a7797-874d-438b-a3a6-8b19d97011df\") " Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.357503 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.357704 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.357785 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.357890 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.357964 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j6pl\" (UniqueName: \"kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" 
Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358040 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358093 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358125 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpbk5\" (UniqueName: \"kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358185 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358244 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.358897 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.378369 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.390076 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.395662 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.396089 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f53c-account-create-gm78b" 
event={"ID":"be2a7797-874d-438b-a3a6-8b19d97011df","Type":"ContainerDied","Data":"163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1"} Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.396129 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="163d30a7341216b93200ebab4882d0f38eae8475835bd45703403c5bd4200dd1" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.396254 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f53c-account-create-gm78b" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.405212 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.407653 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s" (OuterVolumeSpecName: "kube-api-access-scc9s") pod "be2a7797-874d-438b-a3a6-8b19d97011df" (UID: "be2a7797-874d-438b-a3a6-8b19d97011df"). InnerVolumeSpecName "kube-api-access-scc9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.414800 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.416351 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-fcfd-account-create-l824k" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.416987 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fcfd-account-create-l824k" event={"ID":"f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d","Type":"ContainerDied","Data":"020f0cea24db571348d3f876fcd172d925c384caf642d3b26a00b45d66da5e36"} Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.417028 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="020f0cea24db571348d3f876fcd172d925c384caf642d3b26a00b45d66da5e36" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.419964 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:13:20 crc kubenswrapper[4875]: E1007 08:13:20.420663 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2a7797-874d-438b-a3a6-8b19d97011df" containerName="mariadb-account-create" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.420679 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2a7797-874d-438b-a3a6-8b19d97011df" containerName="mariadb-account-create" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.420943 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="be2a7797-874d-438b-a3a6-8b19d97011df" containerName="mariadb-account-create" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.422117 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.424221 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.428645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpbk5\" (UniqueName: \"kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.446086 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j6pl\" (UniqueName: \"kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl\") pod \"horizon-84b9bd675-95vpd\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.463325 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data\") pod \"placement-db-sync-s9cbr\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.464414 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scc9s\" (UniqueName: \"kubernetes.io/projected/be2a7797-874d-438b-a3a6-8b19d97011df-kube-api-access-scc9s\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.464997 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.503036 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.509969 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.511903 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.514103 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.517789 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.518050 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.518314 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.518554 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ldtf9" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.570276 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573106 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573634 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573707 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59z6p\" (UniqueName: \"kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573748 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573776 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573809 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573832 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573856 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g2qt\" (UniqueName: \"kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573925 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573968 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.573996 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.574036 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.574091 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.574118 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.574142 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.584000 4875 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.584116 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.588729 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.593429 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676644 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676695 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g2qt\" (UniqueName: \"kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676725 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfbb2\" (UniqueName: \"kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676755 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676789 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676827 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676841 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676871 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676912 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676943 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.676983 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677003 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677022 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677042 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677062 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677093 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677125 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677154 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59z6p\" (UniqueName: \"kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677189 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677211 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677232 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677248 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.677957 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.681120 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.686406 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.687419 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.687508 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.688325 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.688604 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.689025 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.689182 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.689303 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.691655 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bkg7r"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.692616 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.700075 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: W1007 08:13:20.703332 4875 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78c5bd94_76f8_49ab_841a_c6bfe4087ab0.slice/crio-4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485 WatchSource:0}: Error finding container 4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485: Status 404 returned error can't find the container with id 4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485 Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.720984 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59z6p\" (UniqueName: \"kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.725665 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g2qt\" (UniqueName: \"kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt\") pod \"dnsmasq-dns-56df8fb6b7-l2fn2\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.797394 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.801360 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.801545 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.802485 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.802673 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.802729 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfbb2\" (UniqueName: \"kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 
08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.802785 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.802949 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.803029 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.803918 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.804574 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.820498 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.821056 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.826437 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.834354 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.845645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 
08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.863049 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.871827 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfbb2\" (UniqueName: \"kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.883066 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:20 crc kubenswrapper[4875]: I1007 08:13:20.932440 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.059945 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.088803 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.088930 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.092999 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.093261 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.188056 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.216310 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.229730 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230027 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230206 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230288 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230449 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd876\" (UniqueName: \"kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230560 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.230647 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.283578 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332287 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332362 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 
08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332384 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332420 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd876\" (UniqueName: \"kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332448 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332464 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.332502 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.335562 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.347640 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.347664 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.357382 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.358453 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.388508 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd876\" (UniqueName: \"kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.389016 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd\") pod \"ceilometer-0\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.405642 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-s9cbr"] Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.412153 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.433452 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.494302 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bkg7r" event={"ID":"78c5bd94-76f8-49ab-841a-c6bfe4087ab0","Type":"ContainerStarted","Data":"81d856a9695463b7906f8eb2f780ac834435136db46dcf26d9332e4ae59c40ac"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.494364 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bkg7r" event={"ID":"78c5bd94-76f8-49ab-841a-c6bfe4087ab0","Type":"ContainerStarted","Data":"4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.517963 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b4f5-account-create-npspj" event={"ID":"03f50e66-b42a-4cf1-ba35-47b295394cab","Type":"ContainerDied","Data":"e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.518015 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2a6b13a68fcd126dce731037b109685f60421f4d625ce637903bfa837223677" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.518104 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b4f5-account-create-npspj" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.535476 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" event={"ID":"59f07b18-1f61-4d86-9231-0ed7a485b7cb","Type":"ContainerStarted","Data":"582108a5199604ef15396ca885d72276d93e37cdccff05a45d1792e4aae09786"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.535612 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqbjw\" (UniqueName: \"kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw\") pod \"03f50e66-b42a-4cf1-ba35-47b295394cab\" (UID: \"03f50e66-b42a-4cf1-ba35-47b295394cab\") " Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.535917 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bkg7r" podStartSLOduration=2.535894392 podStartE2EDuration="2.535894392s" podCreationTimestamp="2025-10-07 08:13:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:21.518681772 +0000 UTC m=+1026.478452325" watchObservedRunningTime="2025-10-07 08:13:21.535894392 +0000 UTC m=+1026.495664935" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.551348 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.553823 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dd767d9c-bdd6z" event={"ID":"4fc34509-2d69-45f6-95ba-75b873dc58ed","Type":"ContainerStarted","Data":"37c266891db9c74a11da493170b631d01bb05aa7c745c98d9ddfd4f36a528c63"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.558565 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw" (OuterVolumeSpecName: "kube-api-access-cqbjw") pod "03f50e66-b42a-4cf1-ba35-47b295394cab" (UID: "03f50e66-b42a-4cf1-ba35-47b295394cab"). InnerVolumeSpecName "kube-api-access-cqbjw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.597572 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s9cbr" event={"ID":"2c511555-5539-4e0f-9693-8ecc46fc4b8a","Type":"ContainerStarted","Data":"84fa5629d77fa6d214c90fa4737fb649e00d84afaadf9a53ebf3a3322bcaf834"} Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.638171 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqbjw\" (UniqueName: \"kubernetes.io/projected/03f50e66-b42a-4cf1-ba35-47b295394cab-kube-api-access-cqbjw\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:21 crc kubenswrapper[4875]: I1007 08:13:21.716751 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.052615 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.107395 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.164205 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-km8j5"] Oct 07 08:13:22 crc kubenswrapper[4875]: E1007 08:13:22.164845 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f50e66-b42a-4cf1-ba35-47b295394cab" containerName="mariadb-account-create" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.164863 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f50e66-b42a-4cf1-ba35-47b295394cab" containerName="mariadb-account-create" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.165068 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f50e66-b42a-4cf1-ba35-47b295394cab" containerName="mariadb-account-create" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.165917 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.176154 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.176348 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ls5zf" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.197614 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-km8j5"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.237939 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.270928 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.270994 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntpkv\" (UniqueName: \"kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.271061 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.350019 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.375004 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.375392 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.375425 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntpkv\" (UniqueName: \"kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.388503 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.389313 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.390614 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.404389 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.411459 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntpkv\" (UniqueName: \"kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv\") pod \"barbican-db-sync-km8j5\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.441094 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.553109 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.574600 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.615406 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.615493 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.615522 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.615570 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.615657 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2l2g\" (UniqueName: \"kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g\") pod 
\"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.644124 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.685571 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerStarted","Data":"1e125de36983cc6d16ee158b452dc4d73a4d89e40edb0ca614e8cb989bbc4f1e"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.695030 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-8lc4s"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.696270 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.699121 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84b9bd675-95vpd" event={"ID":"0796acc2-62bc-446e-aba7-72d4065a9e3b","Type":"ContainerStarted","Data":"2cd8371085b9541d6a9084e319515549e446ceb5f14d6fb4d954319bec812dcc"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.701492 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8lc4s"] Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.723930 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.724427 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qdjjh" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.724645 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.725166 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2l2g\" (UniqueName: \"kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.725241 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.725293 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.725314 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.725355 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" 
(UniqueName: \"kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.727144 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.727934 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.728295 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.735005 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.770577 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2l2g\" (UniqueName: \"kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g\") pod \"horizon-57f6dd6d75-v26bw\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.796134 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerStarted","Data":"16f61710ee12bcc24a3d18d4bb7a34abcde97473f876851db742525ccc252572"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.800426 4875 generic.go:334] "Generic (PLEG): container finished" podID="59f07b18-1f61-4d86-9231-0ed7a485b7cb" containerID="be47ab455377cb502b688e42b9bf4a78163651d2f8bb0db05913559b4487c6e9" exitCode=0 Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.800527 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" event={"ID":"59f07b18-1f61-4d86-9231-0ed7a485b7cb","Type":"ContainerDied","Data":"be47ab455377cb502b688e42b9bf4a78163651d2f8bb0db05913559b4487c6e9"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.812292 4875 generic.go:334] "Generic (PLEG): container finished" podID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerID="939245df2ac0673eccd8af823818dba7474bbf36457570b9f4fa2c55c136d5ac" exitCode=0 Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.813179 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" event={"ID":"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765","Type":"ContainerDied","Data":"939245df2ac0673eccd8af823818dba7474bbf36457570b9f4fa2c55c136d5ac"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 
08:13:22.813210 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" event={"ID":"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765","Type":"ContainerStarted","Data":"29d5c73a3f9edf536a1f867c1c6de2199360729a987e7872b3ab1e386a866422"} Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.813370 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830104 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830170 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830256 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4csn\" (UniqueName: \"kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830281 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830339 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.830364 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.932336 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.932445 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc 
kubenswrapper[4875]: I1007 08:13:22.932507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.932547 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.932742 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4csn\" (UniqueName: \"kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.932772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.936557 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.940487 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.948646 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.948799 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.958076 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4csn\" (UniqueName: \"kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:22 crc kubenswrapper[4875]: I1007 08:13:22.959931 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data\") pod \"cinder-db-sync-8lc4s\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.062199 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.063024 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.367195 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.443951 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.444113 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.444192 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9czg4\" (UniqueName: \"kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.444214 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.444285 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.444316 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config\") pod \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\" (UID: \"59f07b18-1f61-4d86-9231-0ed7a485b7cb\") " Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.475366 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4" (OuterVolumeSpecName: "kube-api-access-9czg4") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "kube-api-access-9czg4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.475922 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config" (OuterVolumeSpecName: "config") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.482458 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.484739 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-km8j5"] Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.489183 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.491028 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.529440 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "59f07b18-1f61-4d86-9231-0ed7a485b7cb" (UID: "59f07b18-1f61-4d86-9231-0ed7a485b7cb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548245 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548642 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548653 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548663 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9czg4\" (UniqueName: \"kubernetes.io/projected/59f07b18-1f61-4d86-9231-0ed7a485b7cb-kube-api-access-9czg4\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548671 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.548679 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f07b18-1f61-4d86-9231-0ed7a485b7cb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.677237 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:23 crc kubenswrapper[4875]: W1007 08:13:23.681715 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode118a2f5_641e_419c_b60a_f8f6634b6262.slice/crio-83db7f0f580e462da467575ff3a1956c492659a9f192b2bbaab3cbc9c9610d6c WatchSource:0}: Error finding container 83db7f0f580e462da467575ff3a1956c492659a9f192b2bbaab3cbc9c9610d6c: Status 404 returned error can't find the container with id 83db7f0f580e462da467575ff3a1956c492659a9f192b2bbaab3cbc9c9610d6c Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.824494 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8lc4s"] Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.826326 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerStarted","Data":"27d2ac9ec3fee9630df3fdd4811cae706746d775e77213280e0ea1a2037ecce5"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.832303 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" event={"ID":"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765","Type":"ContainerStarted","Data":"3e9aacff9d467b59a8621d49d0e53e11257a4e6e0170bece7f549aefa5f88482"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.832400 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.834079 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57f6dd6d75-v26bw" 
event={"ID":"e118a2f5-641e-419c-b60a-f8f6634b6262","Type":"ContainerStarted","Data":"83db7f0f580e462da467575ff3a1956c492659a9f192b2bbaab3cbc9c9610d6c"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.843222 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerStarted","Data":"8fc672291e8ea59f858dea16895b97935674fbaf12cfbb72854ed3669d8b48eb"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.847587 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-km8j5" event={"ID":"7ede5949-4681-4699-befa-f13a645d1f4c","Type":"ContainerStarted","Data":"95d902de709c12ce4314a9047574e5c867a8306aad0a734d0c6b577b6f28a714"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.854366 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" event={"ID":"59f07b18-1f61-4d86-9231-0ed7a485b7cb","Type":"ContainerDied","Data":"582108a5199604ef15396ca885d72276d93e37cdccff05a45d1792e4aae09786"} Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.854432 4875 scope.go:117] "RemoveContainer" containerID="be47ab455377cb502b688e42b9bf4a78163651d2f8bb0db05913559b4487c6e9" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.854430 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-l454k" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.860287 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" podStartSLOduration=3.860268378 podStartE2EDuration="3.860268378s" podCreationTimestamp="2025-10-07 08:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:23.849844415 +0000 UTC m=+1028.809614968" watchObservedRunningTime="2025-10-07 08:13:23.860268378 +0000 UTC m=+1028.820038921" Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.911032 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:23 crc kubenswrapper[4875]: I1007 08:13:23.924119 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-l454k"] Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.927760 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerStarted","Data":"90a155a4fdfa4dd9dfa6a5f6f4777cb470596da38664d6e0679e405ff14657db"} Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.929331 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8lc4s" event={"ID":"08b9d931-b59f-4e6c-9081-c8b918d37ba8","Type":"ContainerStarted","Data":"c7e3c0cce439c3061ec08e44642ded727271be32b9936b9638a6471ce2568d5c"} Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.932102 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerStarted","Data":"2b64d78ee9b59926f8046a8e1f9a2976903829afa57667f789354be6729e7e02"} Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.937143 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-log" 
containerID="cri-o://27d2ac9ec3fee9630df3fdd4811cae706746d775e77213280e0ea1a2037ecce5" gracePeriod=30 Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.938452 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-httpd" containerID="cri-o://90a155a4fdfa4dd9dfa6a5f6f4777cb470596da38664d6e0679e405ff14657db" gracePeriod=30 Oct 07 08:13:24 crc kubenswrapper[4875]: I1007 08:13:24.968004 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.967984254 podStartE2EDuration="4.967984254s" podCreationTimestamp="2025-10-07 08:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:24.965898377 +0000 UTC m=+1029.925668940" watchObservedRunningTime="2025-10-07 08:13:24.967984254 +0000 UTC m=+1029.927754797" Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.714458 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59f07b18-1f61-4d86-9231-0ed7a485b7cb" path="/var/lib/kubelet/pods/59f07b18-1f61-4d86-9231-0ed7a485b7cb/volumes" Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.948760 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-log" containerID="cri-o://2b64d78ee9b59926f8046a8e1f9a2976903829afa57667f789354be6729e7e02" gracePeriod=30 Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.948711 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerStarted","Data":"034268eef1cbeaa8e0115acddff968858f73e2d23a675b81c86a0251ff5a45d7"} Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.949587 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-httpd" containerID="cri-o://034268eef1cbeaa8e0115acddff968858f73e2d23a675b81c86a0251ff5a45d7" gracePeriod=30 Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.960799 4875 generic.go:334] "Generic (PLEG): container finished" podID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerID="90a155a4fdfa4dd9dfa6a5f6f4777cb470596da38664d6e0679e405ff14657db" exitCode=0 Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.960832 4875 generic.go:334] "Generic (PLEG): container finished" podID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerID="27d2ac9ec3fee9630df3fdd4811cae706746d775e77213280e0ea1a2037ecce5" exitCode=143 Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.960853 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerDied","Data":"90a155a4fdfa4dd9dfa6a5f6f4777cb470596da38664d6e0679e405ff14657db"} Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.960891 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerDied","Data":"27d2ac9ec3fee9630df3fdd4811cae706746d775e77213280e0ea1a2037ecce5"} Oct 07 08:13:25 crc kubenswrapper[4875]: I1007 08:13:25.979073 4875 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.979054796 podStartE2EDuration="5.979054796s" podCreationTimestamp="2025-10-07 08:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:25.978472268 +0000 UTC m=+1030.938242811" watchObservedRunningTime="2025-10-07 08:13:25.979054796 +0000 UTC m=+1030.938825339" Oct 07 08:13:26 crc kubenswrapper[4875]: I1007 08:13:26.995856 4875 generic.go:334] "Generic (PLEG): container finished" podID="78c5bd94-76f8-49ab-841a-c6bfe4087ab0" containerID="81d856a9695463b7906f8eb2f780ac834435136db46dcf26d9332e4ae59c40ac" exitCode=0 Oct 07 08:13:26 crc kubenswrapper[4875]: I1007 08:13:26.996088 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bkg7r" event={"ID":"78c5bd94-76f8-49ab-841a-c6bfe4087ab0","Type":"ContainerDied","Data":"81d856a9695463b7906f8eb2f780ac834435136db46dcf26d9332e4ae59c40ac"} Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.004010 4875 generic.go:334] "Generic (PLEG): container finished" podID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerID="034268eef1cbeaa8e0115acddff968858f73e2d23a675b81c86a0251ff5a45d7" exitCode=143 Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.004041 4875 generic.go:334] "Generic (PLEG): container finished" podID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerID="2b64d78ee9b59926f8046a8e1f9a2976903829afa57667f789354be6729e7e02" exitCode=143 Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.004080 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerDied","Data":"034268eef1cbeaa8e0115acddff968858f73e2d23a675b81c86a0251ff5a45d7"} Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.004108 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerDied","Data":"2b64d78ee9b59926f8046a8e1f9a2976903829afa57667f789354be6729e7e02"} Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.444709 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-rrk4v"] Oct 07 08:13:27 crc kubenswrapper[4875]: E1007 08:13:27.446585 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f07b18-1f61-4d86-9231-0ed7a485b7cb" containerName="init" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.446612 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f07b18-1f61-4d86-9231-0ed7a485b7cb" containerName="init" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.447584 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="59f07b18-1f61-4d86-9231-0ed7a485b7cb" containerName="init" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.449696 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.453228 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.454823 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-sfkpt" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.455743 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.464246 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rrk4v"] Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.525414 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.525700 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfp2w\" (UniqueName: \"kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.525995 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.627812 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.627920 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.628005 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfp2w\" (UniqueName: \"kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.638992 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.639329 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.653358 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfp2w\" (UniqueName: \"kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w\") pod \"neutron-db-sync-rrk4v\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:27 crc kubenswrapper[4875]: I1007 08:13:27.792370 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.192271 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.249158 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.251217 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.258337 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.276071 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.298713 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.330283 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64854f4c8-d67s8"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.332060 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.346466 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64854f4c8-d67s8"] Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349585 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349628 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349654 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349705 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j5r4\" (UniqueName: \"kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349732 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349769 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.349824 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451288 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451351 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb37cba-9925-4808-9b9f-6dfd2550c15e-logs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451379 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-scripts\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451462 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8nn9\" (UniqueName: \"kubernetes.io/projected/cfb37cba-9925-4808-9b9f-6dfd2550c15e-kube-api-access-z8nn9\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451504 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-tls-certs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451627 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j5r4\" (UniqueName: \"kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451720 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-combined-ca-bundle\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451753 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451858 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.451951 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-config-data\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452072 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-secret-key\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452098 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452157 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452197 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452522 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.452990 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.453364 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.455915 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.456305 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.468765 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j5r4\" (UniqueName: \"kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " 
pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.474428 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key\") pod \"horizon-8486dbbd8b-5n679\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554315 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-config-data\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554406 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-secret-key\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554456 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb37cba-9925-4808-9b9f-6dfd2550c15e-logs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554476 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-scripts\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554519 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8nn9\" (UniqueName: \"kubernetes.io/projected/cfb37cba-9925-4808-9b9f-6dfd2550c15e-kube-api-access-z8nn9\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554541 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-tls-certs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.554584 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-combined-ca-bundle\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.555306 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb37cba-9925-4808-9b9f-6dfd2550c15e-logs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.555665 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-scripts\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.556256 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfb37cba-9925-4808-9b9f-6dfd2550c15e-config-data\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.560601 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-combined-ca-bundle\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.561123 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-secret-key\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.562975 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb37cba-9925-4808-9b9f-6dfd2550c15e-horizon-tls-certs\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.575493 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8nn9\" (UniqueName: \"kubernetes.io/projected/cfb37cba-9925-4808-9b9f-6dfd2550c15e-kube-api-access-z8nn9\") pod \"horizon-64854f4c8-d67s8\" (UID: \"cfb37cba-9925-4808-9b9f-6dfd2550c15e\") " pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.586796 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:28 crc kubenswrapper[4875]: I1007 08:13:28.664659 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:30 crc kubenswrapper[4875]: I1007 08:13:30.865111 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:13:30 crc kubenswrapper[4875]: I1007 08:13:30.927594 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:13:30 crc kubenswrapper[4875]: I1007 08:13:30.928202 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" containerID="cri-o://177b7130f64a1946d1b93f049b45ee9a2bb403c591d4a0c0e34b481470431644" gracePeriod=10 Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.678594 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823495 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823562 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823606 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823712 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823740 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.823831 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59z6p\" (UniqueName: \"kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.824006 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.824067 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run\") pod \"85df9aa3-cf70-444e-99c5-7d3780c2d621\" (UID: \"85df9aa3-cf70-444e-99c5-7d3780c2d621\") " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.825250 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.825394 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs" (OuterVolumeSpecName: "logs") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.831404 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p" (OuterVolumeSpecName: "kube-api-access-59z6p") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "kube-api-access-59z6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.834217 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts" (OuterVolumeSpecName: "scripts") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.837039 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.857124 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.876446 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data" (OuterVolumeSpecName: "config-data") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.878794 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "85df9aa3-cf70-444e-99c5-7d3780c2d621" (UID: "85df9aa3-cf70-444e-99c5-7d3780c2d621"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926406 4875 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926440 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926455 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59z6p\" (UniqueName: \"kubernetes.io/projected/85df9aa3-cf70-444e-99c5-7d3780c2d621-kube-api-access-59z6p\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926493 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926503 4875 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85df9aa3-cf70-444e-99c5-7d3780c2d621-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926512 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926521 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.926529 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85df9aa3-cf70-444e-99c5-7d3780c2d621-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:31 crc kubenswrapper[4875]: I1007 08:13:31.950909 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.028710 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.061670 4875 generic.go:334] "Generic (PLEG): container finished" podID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerID="177b7130f64a1946d1b93f049b45ee9a2bb403c591d4a0c0e34b481470431644" exitCode=0 Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.061756 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" event={"ID":"a3eb6579-5ad7-4817-8937-c9a6ca868920","Type":"ContainerDied","Data":"177b7130f64a1946d1b93f049b45ee9a2bb403c591d4a0c0e34b481470431644"} Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.063849 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"85df9aa3-cf70-444e-99c5-7d3780c2d621","Type":"ContainerDied","Data":"1e125de36983cc6d16ee158b452dc4d73a4d89e40edb0ca614e8cb989bbc4f1e"} Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 
08:13:32.063935 4875 scope.go:117] "RemoveContainer" containerID="90a155a4fdfa4dd9dfa6a5f6f4777cb470596da38664d6e0679e405ff14657db" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.063935 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.108057 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.129380 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.140325 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:32 crc kubenswrapper[4875]: E1007 08:13:32.140927 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-log" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.140954 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-log" Oct 07 08:13:32 crc kubenswrapper[4875]: E1007 08:13:32.140985 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-httpd" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.140995 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-httpd" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.141244 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-httpd" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.141291 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" containerName="glance-log" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.155249 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.155381 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.159080 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.172811 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.233843 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.233929 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.233984 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.234004 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.234033 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttnwr\" (UniqueName: \"kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.234051 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.234092 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.234122 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs\") pod \"glance-default-external-api-0\" 
(UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336479 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336555 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336629 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336657 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336692 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttnwr\" (UniqueName: \"kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336714 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.336814 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.337238 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") device mount path \"/mnt/openstack/pv12\"" 
pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.338083 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.338402 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.342510 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.342635 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.343496 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.344802 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.353754 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttnwr\" (UniqueName: \"kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.367643 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " pod="openstack/glance-default-external-api-0" Oct 07 08:13:32 crc kubenswrapper[4875]: I1007 08:13:32.481246 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:13:33 crc kubenswrapper[4875]: I1007 08:13:33.711669 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85df9aa3-cf70-444e-99c5-7d3780c2d621" path="/var/lib/kubelet/pods/85df9aa3-cf70-444e-99c5-7d3780c2d621/volumes" Oct 07 08:13:33 crc kubenswrapper[4875]: I1007 08:13:33.728973 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.515163 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.515905 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n58ch549hb9h58h56ch68bh5fdh59dhbchbdhcch8bh5cfhcbh89hd7h698h54h57chb9h555h59bh657h5dfh5cfh54h568h5cfh588h8bh557h595q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2l2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-57f6dd6d75-v26bw_openstack(e118a2f5-641e-419c-b60a-f8f6634b6262): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.525930 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-57f6dd6d75-v26bw" podUID="e118a2f5-641e-419c-b60a-f8f6634b6262" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.546778 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.546994 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h98hd7hch555h546h5dbh56fhd6hbdh79h597h97h597h59bh69h556h8dh58ch59fh75h674hc5h5bfh58fh67fh579h5fh544h5f6h99h68fq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2bgn5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6dd767d9c-bdd6z_openstack(4fc34509-2d69-45f6-95ba-75b873dc58ed): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.549034 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6dd767d9c-bdd6z" podUID="4fc34509-2d69-45f6-95ba-75b873dc58ed" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.584466 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.584666 4875 kuberuntime_manager.go:1274] "Unhandled 
Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n58fh5c5h9dh66ch644h7chf8h547h5cdh677h54dh5b4h5c4h54bh9dh97h596hf4h68h5b7h5d5hc8h64h585h5fbh5c6h65ch56bh554h65dh58dh5dbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2j6pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-84b9bd675-95vpd_openstack(0796acc2-62bc-446e-aba7-72d4065a9e3b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:13:38 crc kubenswrapper[4875]: E1007 08:13:38.588442 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-84b9bd675-95vpd" podUID="0796acc2-62bc-446e-aba7-72d4065a9e3b" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.631323 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662587 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662672 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662700 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662748 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662777 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhdrf\" (UniqueName: \"kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.662955 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts\") pod \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\" (UID: \"78c5bd94-76f8-49ab-841a-c6bfe4087ab0\") " Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.668790 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts" (OuterVolumeSpecName: "scripts") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.670032 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf" (OuterVolumeSpecName: "kube-api-access-nhdrf") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "kube-api-access-nhdrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.670988 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.673044 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.695166 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data" (OuterVolumeSpecName: "config-data") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.695196 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78c5bd94-76f8-49ab-841a-c6bfe4087ab0" (UID: "78c5bd94-76f8-49ab-841a-c6bfe4087ab0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767342 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767586 4875 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767598 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767608 4875 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767619 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:38 crc kubenswrapper[4875]: I1007 08:13:38.767649 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhdrf\" (UniqueName: \"kubernetes.io/projected/78c5bd94-76f8-49ab-841a-c6bfe4087ab0-kube-api-access-nhdrf\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.119957 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bkg7r" event={"ID":"78c5bd94-76f8-49ab-841a-c6bfe4087ab0","Type":"ContainerDied","Data":"4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485"} Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.120012 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c7c14d8083509b8f0aa8b35fd29564e1a1b5b22a5a787792b0bf3b53084d485" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.120068 4875 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bkg7r" Oct 07 08:13:39 crc kubenswrapper[4875]: E1007 08:13:39.287761 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 07 08:13:39 crc kubenswrapper[4875]: E1007 08:13:39.287938 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ntpkv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-km8j5_openstack(7ede5949-4681-4699-befa-f13a645d1f4c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:13:39 crc kubenswrapper[4875]: E1007 08:13:39.289125 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-km8j5" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.725402 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bkg7r"] Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.733410 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bkg7r"] Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.842051 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-48kgl"] Oct 07 08:13:39 crc kubenswrapper[4875]: E1007 08:13:39.842514 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c5bd94-76f8-49ab-841a-c6bfe4087ab0" containerName="keystone-bootstrap" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.842535 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c5bd94-76f8-49ab-841a-c6bfe4087ab0" 
containerName="keystone-bootstrap" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.842717 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="78c5bd94-76f8-49ab-841a-c6bfe4087ab0" containerName="keystone-bootstrap" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.843431 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.846918 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.846989 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.847096 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.847786 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5bgfb" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.850531 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-48kgl"] Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902028 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902111 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902217 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902241 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvt72\" (UniqueName: \"kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902313 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:39 crc kubenswrapper[4875]: I1007 08:13:39.902359 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys\") pod \"keystone-bootstrap-48kgl\" (UID: 
\"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.003986 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.004039 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.004081 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.004113 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.004176 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.004193 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvt72\" (UniqueName: \"kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.010213 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.010210 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.010464 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.014395 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.023605 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.024567 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvt72\" (UniqueName: \"kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72\") pod \"keystone-bootstrap-48kgl\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:40 crc kubenswrapper[4875]: E1007 08:13:40.129940 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-km8j5" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" Oct 07 08:13:40 crc kubenswrapper[4875]: I1007 08:13:40.199901 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:41 crc kubenswrapper[4875]: I1007 08:13:41.712265 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78c5bd94-76f8-49ab-841a-c6bfe4087ab0" path="/var/lib/kubelet/pods/78c5bd94-76f8-49ab-841a-c6bfe4087ab0/volumes" Oct 07 08:13:43 crc kubenswrapper[4875]: I1007 08:13:43.729621 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: i/o timeout" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.703177 4875 scope.go:117] "RemoveContainer" containerID="27d2ac9ec3fee9630df3fdd4811cae706746d775e77213280e0ea1a2037ecce5" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.842418 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.848419 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.855179 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.879269 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.892298 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.951175 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.951232 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.951260 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.951289 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.954615 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbb2\" (UniqueName: \"kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.954754 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j64v\" (UniqueName: \"kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.956551 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.956680 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.956749 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.956834 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: 
\"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.957095 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.957156 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.957214 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0\") pod \"a3eb6579-5ad7-4817-8937-c9a6ca868920\" (UID: \"a3eb6579-5ad7-4817-8937-c9a6ca868920\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.957288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"46209aef-15e8-4cf8-8a66-ee09384cc053\" (UID: \"46209aef-15e8-4cf8-8a66-ee09384cc053\") " Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.966161 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v" (OuterVolumeSpecName: "kube-api-access-8j64v") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "kube-api-access-8j64v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.966241 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2" (OuterVolumeSpecName: "kube-api-access-cfbb2") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "kube-api-access-cfbb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.966562 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs" (OuterVolumeSpecName: "logs") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.976471 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.982389 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:13:46 crc kubenswrapper[4875]: I1007 08:13:46.982490 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts" (OuterVolumeSpecName: "scripts") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.029259 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.030409 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.039565 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.058771 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config" (OuterVolumeSpecName: "config") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.064312 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065160 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j6pl\" (UniqueName: \"kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl\") pod \"0796acc2-62bc-446e-aba7-72d4065a9e3b\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065217 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts\") pod \"e118a2f5-641e-419c-b60a-f8f6634b6262\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065249 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts\") pod \"0796acc2-62bc-446e-aba7-72d4065a9e3b\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065394 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key\") pod \"0796acc2-62bc-446e-aba7-72d4065a9e3b\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065430 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts\") pod \"4fc34509-2d69-45f6-95ba-75b873dc58ed\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065527 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data\") pod \"0796acc2-62bc-446e-aba7-72d4065a9e3b\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065574 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key\") pod \"e118a2f5-641e-419c-b60a-f8f6634b6262\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065605 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key\") pod \"4fc34509-2d69-45f6-95ba-75b873dc58ed\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065640 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bgn5\" (UniqueName: \"kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5\") pod \"4fc34509-2d69-45f6-95ba-75b873dc58ed\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065663 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data\") pod \"4fc34509-2d69-45f6-95ba-75b873dc58ed\" (UID: 
\"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065731 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2l2g\" (UniqueName: \"kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g\") pod \"e118a2f5-641e-419c-b60a-f8f6634b6262\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065763 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data\") pod \"e118a2f5-641e-419c-b60a-f8f6634b6262\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065799 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs\") pod \"4fc34509-2d69-45f6-95ba-75b873dc58ed\" (UID: \"4fc34509-2d69-45f6-95ba-75b873dc58ed\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065823 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs\") pod \"e118a2f5-641e-419c-b60a-f8f6634b6262\" (UID: \"e118a2f5-641e-419c-b60a-f8f6634b6262\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.065870 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs\") pod \"0796acc2-62bc-446e-aba7-72d4065a9e3b\" (UID: \"0796acc2-62bc-446e-aba7-72d4065a9e3b\") " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066287 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066327 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066340 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066353 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066362 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066371 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbb2\" (UniqueName: \"kubernetes.io/projected/46209aef-15e8-4cf8-8a66-ee09384cc053-kube-api-access-cfbb2\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066383 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j64v\" (UniqueName: 
\"kubernetes.io/projected/a3eb6579-5ad7-4817-8937-c9a6ca868920-kube-api-access-8j64v\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066396 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066409 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066421 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066430 4875 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46209aef-15e8-4cf8-8a66-ee09384cc053-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066391 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts" (OuterVolumeSpecName: "scripts") pod "0796acc2-62bc-446e-aba7-72d4065a9e3b" (UID: "0796acc2-62bc-446e-aba7-72d4065a9e3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.066931 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts" (OuterVolumeSpecName: "scripts") pod "e118a2f5-641e-419c-b60a-f8f6634b6262" (UID: "e118a2f5-641e-419c-b60a-f8f6634b6262"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.067244 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs" (OuterVolumeSpecName: "logs") pod "e118a2f5-641e-419c-b60a-f8f6634b6262" (UID: "e118a2f5-641e-419c-b60a-f8f6634b6262"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.067575 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs" (OuterVolumeSpecName: "logs") pod "4fc34509-2d69-45f6-95ba-75b873dc58ed" (UID: "4fc34509-2d69-45f6-95ba-75b873dc58ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.068008 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts" (OuterVolumeSpecName: "scripts") pod "4fc34509-2d69-45f6-95ba-75b873dc58ed" (UID: "4fc34509-2d69-45f6-95ba-75b873dc58ed"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.068388 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs" (OuterVolumeSpecName: "logs") pod "0796acc2-62bc-446e-aba7-72d4065a9e3b" (UID: "0796acc2-62bc-446e-aba7-72d4065a9e3b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.068561 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data" (OuterVolumeSpecName: "config-data") pod "0796acc2-62bc-446e-aba7-72d4065a9e3b" (UID: "0796acc2-62bc-446e-aba7-72d4065a9e3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.069172 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data" (OuterVolumeSpecName: "config-data") pod "e118a2f5-641e-419c-b60a-f8f6634b6262" (UID: "e118a2f5-641e-419c-b60a-f8f6634b6262"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.070405 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl" (OuterVolumeSpecName: "kube-api-access-2j6pl") pod "0796acc2-62bc-446e-aba7-72d4065a9e3b" (UID: "0796acc2-62bc-446e-aba7-72d4065a9e3b"). InnerVolumeSpecName "kube-api-access-2j6pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.070753 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0796acc2-62bc-446e-aba7-72d4065a9e3b" (UID: "0796acc2-62bc-446e-aba7-72d4065a9e3b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.071023 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data" (OuterVolumeSpecName: "config-data") pod "4fc34509-2d69-45f6-95ba-75b873dc58ed" (UID: "4fc34509-2d69-45f6-95ba-75b873dc58ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.072966 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5" (OuterVolumeSpecName: "kube-api-access-2bgn5") pod "4fc34509-2d69-45f6-95ba-75b873dc58ed" (UID: "4fc34509-2d69-45f6-95ba-75b873dc58ed"). InnerVolumeSpecName "kube-api-access-2bgn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.078052 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e118a2f5-641e-419c-b60a-f8f6634b6262" (UID: "e118a2f5-641e-419c-b60a-f8f6634b6262"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.078067 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.081132 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4fc34509-2d69-45f6-95ba-75b873dc58ed" (UID: "4fc34509-2d69-45f6-95ba-75b873dc58ed"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.083227 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a3eb6579-5ad7-4817-8937-c9a6ca868920" (UID: "a3eb6579-5ad7-4817-8937-c9a6ca868920"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.093059 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g" (OuterVolumeSpecName: "kube-api-access-k2l2g") pod "e118a2f5-641e-419c-b60a-f8f6634b6262" (UID: "e118a2f5-641e-419c-b60a-f8f6634b6262"). InnerVolumeSpecName "kube-api-access-k2l2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.093461 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.097348 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data" (OuterVolumeSpecName: "config-data") pod "46209aef-15e8-4cf8-8a66-ee09384cc053" (UID: "46209aef-15e8-4cf8-8a66-ee09384cc053"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170044 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2l2g\" (UniqueName: \"kubernetes.io/projected/e118a2f5-641e-419c-b60a-f8f6634b6262-kube-api-access-k2l2g\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170141 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170159 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc34509-2d69-45f6-95ba-75b873dc58ed-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170208 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e118a2f5-641e-419c-b60a-f8f6634b6262-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170222 4875 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170253 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0796acc2-62bc-446e-aba7-72d4065a9e3b-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170265 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j6pl\" (UniqueName: \"kubernetes.io/projected/0796acc2-62bc-446e-aba7-72d4065a9e3b-kube-api-access-2j6pl\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170276 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170288 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e118a2f5-641e-419c-b60a-f8f6634b6262-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170323 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3eb6579-5ad7-4817-8937-c9a6ca868920-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170434 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170452 4875 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0796acc2-62bc-446e-aba7-72d4065a9e3b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170468 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170480 4875 reconciler_common.go:293] "Volume 
detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46209aef-15e8-4cf8-8a66-ee09384cc053-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170491 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0796acc2-62bc-446e-aba7-72d4065a9e3b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170503 4875 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e118a2f5-641e-419c-b60a-f8f6634b6262-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170525 4875 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fc34509-2d69-45f6-95ba-75b873dc58ed-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170538 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bgn5\" (UniqueName: \"kubernetes.io/projected/4fc34509-2d69-45f6-95ba-75b873dc58ed-kube-api-access-2bgn5\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.170550 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fc34509-2d69-45f6-95ba-75b873dc58ed-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.203662 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"46209aef-15e8-4cf8-8a66-ee09384cc053","Type":"ContainerDied","Data":"8fc672291e8ea59f858dea16895b97935674fbaf12cfbb72854ed3669d8b48eb"} Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.203733 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.207305 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" event={"ID":"a3eb6579-5ad7-4817-8937-c9a6ca868920","Type":"ContainerDied","Data":"7de65ece51d1df4fb4de156bfb73119b027598988dd59a56bd3836ea30e8f91c"} Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.207430 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.209057 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dd767d9c-bdd6z" event={"ID":"4fc34509-2d69-45f6-95ba-75b873dc58ed","Type":"ContainerDied","Data":"37c266891db9c74a11da493170b631d01bb05aa7c745c98d9ddfd4f36a528c63"} Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.209106 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6dd767d9c-bdd6z" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.214715 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84b9bd675-95vpd" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.214708 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84b9bd675-95vpd" event={"ID":"0796acc2-62bc-446e-aba7-72d4065a9e3b","Type":"ContainerDied","Data":"2cd8371085b9541d6a9084e319515549e446ceb5f14d6fb4d954319bec812dcc"} Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.217368 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57f6dd6d75-v26bw" event={"ID":"e118a2f5-641e-419c-b60a-f8f6634b6262","Type":"ContainerDied","Data":"83db7f0f580e462da467575ff3a1956c492659a9f192b2bbaab3cbc9c9610d6c"} Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.217439 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f6dd6d75-v26bw" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.259530 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.272790 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.329647 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:47 crc kubenswrapper[4875]: E1007 08:13:47.330209 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-log" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330336 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-log" Oct 07 08:13:47 crc kubenswrapper[4875]: E1007 08:13:47.330353 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="init" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330360 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="init" Oct 07 08:13:47 crc kubenswrapper[4875]: E1007 08:13:47.330379 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-httpd" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330385 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-httpd" Oct 07 08:13:47 crc kubenswrapper[4875]: E1007 08:13:47.330409 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330415 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330603 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-httpd" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330621 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.330636 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" containerName="glance-log" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 
08:13:47.334464 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.337836 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.338664 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.353994 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.362990 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6dd767d9c-bdd6z"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.372303 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.391327 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.398488 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-84b9bd675-95vpd"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.408418 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.413464 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-pgzlv"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.424401 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.430561 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-57f6dd6d75-v26bw"] Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475201 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475261 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475296 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475346 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 
07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475420 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475774 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bqk5\" (UniqueName: \"kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475840 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.475923 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.577668 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578149 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578177 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578207 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578223 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc 
kubenswrapper[4875]: I1007 08:13:47.578231 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578392 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bqk5\" (UniqueName: \"kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578428 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.578466 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.579098 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.579159 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.583302 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.583381 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.583916 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.584515 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.594548 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bqk5\" (UniqueName: \"kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.604208 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.662747 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.710325 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0796acc2-62bc-446e-aba7-72d4065a9e3b" path="/var/lib/kubelet/pods/0796acc2-62bc-446e-aba7-72d4065a9e3b/volumes" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.710935 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46209aef-15e8-4cf8-8a66-ee09384cc053" path="/var/lib/kubelet/pods/46209aef-15e8-4cf8-8a66-ee09384cc053/volumes" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.711787 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fc34509-2d69-45f6-95ba-75b873dc58ed" path="/var/lib/kubelet/pods/4fc34509-2d69-45f6-95ba-75b873dc58ed/volumes" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.712355 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" path="/var/lib/kubelet/pods/a3eb6579-5ad7-4817-8937-c9a6ca868920/volumes" Oct 07 08:13:47 crc kubenswrapper[4875]: I1007 08:13:47.713654 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e118a2f5-641e-419c-b60a-f8f6634b6262" path="/var/lib/kubelet/pods/e118a2f5-641e-419c-b60a-f8f6634b6262/volumes" Oct 07 08:13:48 crc kubenswrapper[4875]: E1007 08:13:48.052086 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 07 08:13:48 crc kubenswrapper[4875]: E1007 08:13:48.052405 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s4csn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-8lc4s_openstack(08b9d931-b59f-4e6c-9081-c8b918d37ba8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:13:48 crc kubenswrapper[4875]: E1007 08:13:48.053647 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-8lc4s" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.098136 4875 scope.go:117] "RemoveContainer" containerID="034268eef1cbeaa8e0115acddff968858f73e2d23a675b81c86a0251ff5a45d7" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.214826 4875 scope.go:117] "RemoveContainer" containerID="2b64d78ee9b59926f8046a8e1f9a2976903829afa57667f789354be6729e7e02" Oct 07 08:13:48 crc kubenswrapper[4875]: E1007 08:13:48.265022 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-8lc4s" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.265337 
4875 scope.go:117] "RemoveContainer" containerID="177b7130f64a1946d1b93f049b45ee9a2bb403c591d4a0c0e34b481470431644" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.308131 4875 scope.go:117] "RemoveContainer" containerID="8a70a5f940493356b4e0cf1ffa90eb667b5433a3f21aa152a172e0c34fea6bc5" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.572397 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64854f4c8-d67s8"] Oct 07 08:13:48 crc kubenswrapper[4875]: W1007 08:13:48.579407 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfb37cba_9925_4808_9b9f_6dfd2550c15e.slice/crio-f608d37daf5268e783ebc5cd7f11933fdc7784ed6fb814df925a2e53301c32d1 WatchSource:0}: Error finding container f608d37daf5268e783ebc5cd7f11933fdc7784ed6fb814df925a2e53301c32d1: Status 404 returned error can't find the container with id f608d37daf5268e783ebc5cd7f11933fdc7784ed6fb814df925a2e53301c32d1 Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.582824 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rrk4v"] Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.605707 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.738549 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-48kgl"] Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.739719 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-pgzlv" podUID="a3eb6579-5ad7-4817-8937-c9a6ca868920" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: i/o timeout" Oct 07 08:13:48 crc kubenswrapper[4875]: I1007 08:13:48.853630 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.269267 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerStarted","Data":"e2d678457fa6e068a8dad5a69c489816e1afb7db6be2d66b5dc02afd3c1cfbcc"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.272738 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-48kgl" event={"ID":"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72","Type":"ContainerStarted","Data":"a565a3e99c2bdb53d7bf5020ac4b7dc213507bba5456cf92d5e6f46ec458e791"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.272781 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-48kgl" event={"ID":"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72","Type":"ContainerStarted","Data":"650b52735dc8c94cefd9deb100f5f603a0122ecfab0e8ba3c1e302d31b042d7e"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.281143 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rrk4v" event={"ID":"927a5f2e-c935-47bb-b7b3-0efc834566ca","Type":"ContainerStarted","Data":"3c57a58bee060a2594c16b9cc01373c4132f22d841e87e265afd0d2a0eff9a1c"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.281266 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rrk4v" event={"ID":"927a5f2e-c935-47bb-b7b3-0efc834566ca","Type":"ContainerStarted","Data":"4a2917a7fddd306d58d4a421aac63e007ee6b22b245be6dcf4d617005d2bf62a"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 
08:13:49.285397 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s9cbr" event={"ID":"2c511555-5539-4e0f-9693-8ecc46fc4b8a","Type":"ContainerStarted","Data":"964a3ded33494b263ffb7e5ac1c58bcc035dc255bba52e5e8c55795a4a414ee0"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.292611 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerStarted","Data":"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.292679 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerStarted","Data":"365ccf00ad28ca893aa6bcec5d42a4fc409283767b29a5a8a61a3c1fde717c41"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.301827 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerStarted","Data":"63c478e4e67c6d548105ff3e8393eb7b91f350c3bc1b76640a2274b246438439"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.303287 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-48kgl" podStartSLOduration=10.303263497 podStartE2EDuration="10.303263497s" podCreationTimestamp="2025-10-07 08:13:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:49.294116747 +0000 UTC m=+1054.253887290" watchObservedRunningTime="2025-10-07 08:13:49.303263497 +0000 UTC m=+1054.263034040" Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.304515 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64854f4c8-d67s8" event={"ID":"cfb37cba-9925-4808-9b9f-6dfd2550c15e","Type":"ContainerStarted","Data":"17dd6645cda05a62349946cadfa27e426ca053c60afe0e671fad71e34d007029"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.304620 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64854f4c8-d67s8" event={"ID":"cfb37cba-9925-4808-9b9f-6dfd2550c15e","Type":"ContainerStarted","Data":"f608d37daf5268e783ebc5cd7f11933fdc7784ed6fb814df925a2e53301c32d1"} Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.337309 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-rrk4v" podStartSLOduration=22.337280482 podStartE2EDuration="22.337280482s" podCreationTimestamp="2025-10-07 08:13:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:49.322624272 +0000 UTC m=+1054.282394835" watchObservedRunningTime="2025-10-07 08:13:49.337280482 +0000 UTC m=+1054.297051025" Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.352369 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-s9cbr" podStartSLOduration=4.077373326 podStartE2EDuration="29.352338626s" podCreationTimestamp="2025-10-07 08:13:20 +0000 UTC" firstStartedPulling="2025-10-07 08:13:21.409580151 +0000 UTC m=+1026.369350694" lastFinishedPulling="2025-10-07 08:13:46.684545461 +0000 UTC m=+1051.644315994" observedRunningTime="2025-10-07 08:13:49.343451074 +0000 UTC m=+1054.303221637" watchObservedRunningTime="2025-10-07 08:13:49.352338626 +0000 UTC 
m=+1054.312109169" Oct 07 08:13:49 crc kubenswrapper[4875]: I1007 08:13:49.921296 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.323172 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerStarted","Data":"119540c79b8f0f6afcc7298252d97062239e8fe2a352375b94a272c2d1f8be80"} Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.328505 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerStarted","Data":"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1"} Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.332068 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerStarted","Data":"2319590f78bee1333615b5e2d800305af8de4eaf7a85d56f583ba89474a8ca66"} Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.340162 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerStarted","Data":"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5"} Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.356518 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8486dbbd8b-5n679" podStartSLOduration=21.886378872 podStartE2EDuration="22.356493854s" podCreationTimestamp="2025-10-07 08:13:28 +0000 UTC" firstStartedPulling="2025-10-07 08:13:48.614865984 +0000 UTC m=+1053.574636527" lastFinishedPulling="2025-10-07 08:13:49.084980966 +0000 UTC m=+1054.044751509" observedRunningTime="2025-10-07 08:13:50.351542702 +0000 UTC m=+1055.311313265" watchObservedRunningTime="2025-10-07 08:13:50.356493854 +0000 UTC m=+1055.316264397" Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.369260 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64854f4c8-d67s8" event={"ID":"cfb37cba-9925-4808-9b9f-6dfd2550c15e","Type":"ContainerStarted","Data":"46f298287966093217d9b329bff65bb1fca6e467d5dfe06a02808172e58c6c5f"} Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.375913 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=18.375892319 podStartE2EDuration="18.375892319s" podCreationTimestamp="2025-10-07 08:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:50.374587766 +0000 UTC m=+1055.334358309" watchObservedRunningTime="2025-10-07 08:13:50.375892319 +0000 UTC m=+1055.335662862" Oct 07 08:13:50 crc kubenswrapper[4875]: I1007 08:13:50.424244 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64854f4c8-d67s8" podStartSLOduration=21.973764715 podStartE2EDuration="22.424221583s" podCreationTimestamp="2025-10-07 08:13:28 +0000 UTC" firstStartedPulling="2025-10-07 08:13:48.582234105 +0000 UTC m=+1053.542004648" lastFinishedPulling="2025-10-07 08:13:49.032690953 +0000 UTC m=+1053.992461516" observedRunningTime="2025-10-07 08:13:50.423966555 +0000 UTC m=+1055.383737108" watchObservedRunningTime="2025-10-07 08:13:50.424221583 +0000 UTC 
m=+1055.383992126" Oct 07 08:13:51 crc kubenswrapper[4875]: I1007 08:13:51.390857 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerStarted","Data":"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8"} Oct 07 08:13:51 crc kubenswrapper[4875]: I1007 08:13:51.393798 4875 generic.go:334] "Generic (PLEG): container finished" podID="2c511555-5539-4e0f-9693-8ecc46fc4b8a" containerID="964a3ded33494b263ffb7e5ac1c58bcc035dc255bba52e5e8c55795a4a414ee0" exitCode=0 Oct 07 08:13:51 crc kubenswrapper[4875]: I1007 08:13:51.393909 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s9cbr" event={"ID":"2c511555-5539-4e0f-9693-8ecc46fc4b8a","Type":"ContainerDied","Data":"964a3ded33494b263ffb7e5ac1c58bcc035dc255bba52e5e8c55795a4a414ee0"} Oct 07 08:13:51 crc kubenswrapper[4875]: I1007 08:13:51.396033 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerStarted","Data":"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848"} Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.413105 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerStarted","Data":"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da"} Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.482351 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.482826 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.545072 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.584811 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.584787609 podStartE2EDuration="5.584787609s" podCreationTimestamp="2025-10-07 08:13:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:52.445367351 +0000 UTC m=+1057.405137914" watchObservedRunningTime="2025-10-07 08:13:52.584787609 +0000 UTC m=+1057.544558152" Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.594626 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 08:13:52 crc kubenswrapper[4875]: I1007 08:13:52.921939 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.009121 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts\") pod \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.010288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle\") pod \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.010320 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs\") pod \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.010432 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data\") pod \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.010531 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpbk5\" (UniqueName: \"kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5\") pod \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\" (UID: \"2c511555-5539-4e0f-9693-8ecc46fc4b8a\") " Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.012180 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs" (OuterVolumeSpecName: "logs") pod "2c511555-5539-4e0f-9693-8ecc46fc4b8a" (UID: "2c511555-5539-4e0f-9693-8ecc46fc4b8a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.027068 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts" (OuterVolumeSpecName: "scripts") pod "2c511555-5539-4e0f-9693-8ecc46fc4b8a" (UID: "2c511555-5539-4e0f-9693-8ecc46fc4b8a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.030251 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5" (OuterVolumeSpecName: "kube-api-access-zpbk5") pod "2c511555-5539-4e0f-9693-8ecc46fc4b8a" (UID: "2c511555-5539-4e0f-9693-8ecc46fc4b8a"). InnerVolumeSpecName "kube-api-access-zpbk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.071099 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c511555-5539-4e0f-9693-8ecc46fc4b8a" (UID: "2c511555-5539-4e0f-9693-8ecc46fc4b8a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.086498 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data" (OuterVolumeSpecName: "config-data") pod "2c511555-5539-4e0f-9693-8ecc46fc4b8a" (UID: "2c511555-5539-4e0f-9693-8ecc46fc4b8a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.113131 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.113171 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpbk5\" (UniqueName: \"kubernetes.io/projected/2c511555-5539-4e0f-9693-8ecc46fc4b8a-kube-api-access-zpbk5\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.113182 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.113190 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c511555-5539-4e0f-9693-8ecc46fc4b8a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.113200 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c511555-5539-4e0f-9693-8ecc46fc4b8a-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.452680 4875 generic.go:334] "Generic (PLEG): container finished" podID="8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" containerID="a565a3e99c2bdb53d7bf5020ac4b7dc213507bba5456cf92d5e6f46ec458e791" exitCode=0 Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.453058 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-48kgl" event={"ID":"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72","Type":"ContainerDied","Data":"a565a3e99c2bdb53d7bf5020ac4b7dc213507bba5456cf92d5e6f46ec458e791"} Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.465917 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-s9cbr" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.466665 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s9cbr" event={"ID":"2c511555-5539-4e0f-9693-8ecc46fc4b8a","Type":"ContainerDied","Data":"84fa5629d77fa6d214c90fa4737fb649e00d84afaadf9a53ebf3a3322bcaf834"} Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.466727 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84fa5629d77fa6d214c90fa4737fb649e00d84afaadf9a53ebf3a3322bcaf834" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.476487 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-km8j5" event={"ID":"7ede5949-4681-4699-befa-f13a645d1f4c","Type":"ContainerStarted","Data":"00e1f17dc0358d3e11a2a727768e6a2e01b317e6de939a3a7da46419b96fc8ce"} Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.477464 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.477496 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.537816 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-548b9747d4-bkwjt"] Oct 07 08:13:53 crc kubenswrapper[4875]: E1007 08:13:53.538611 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c511555-5539-4e0f-9693-8ecc46fc4b8a" containerName="placement-db-sync" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.538635 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c511555-5539-4e0f-9693-8ecc46fc4b8a" containerName="placement-db-sync" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.538848 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c511555-5539-4e0f-9693-8ecc46fc4b8a" containerName="placement-db-sync" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.541319 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.545685 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mz6wn" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.546239 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.546430 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.546563 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.546783 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.571962 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-km8j5" podStartSLOduration=2.921491643 podStartE2EDuration="31.5719328s" podCreationTimestamp="2025-10-07 08:13:22 +0000 UTC" firstStartedPulling="2025-10-07 08:13:23.53842181 +0000 UTC m=+1028.498192343" lastFinishedPulling="2025-10-07 08:13:52.188862957 +0000 UTC m=+1057.148633500" observedRunningTime="2025-10-07 08:13:53.518676415 +0000 UTC m=+1058.478446968" watchObservedRunningTime="2025-10-07 08:13:53.5719328 +0000 UTC m=+1058.531703333" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.594984 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-548b9747d4-bkwjt"] Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725418 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-config-data\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725492 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-public-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725594 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-scripts\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725658 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-combined-ca-bundle\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725683 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba464fba-e931-4f8a-be56-6b5456e1572d-logs\") pod 
\"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725716 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-internal-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.725809 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf9xt\" (UniqueName: \"kubernetes.io/projected/ba464fba-e931-4f8a-be56-6b5456e1572d-kube-api-access-kf9xt\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.827962 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-scripts\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828057 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-combined-ca-bundle\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828078 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba464fba-e931-4f8a-be56-6b5456e1572d-logs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828118 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-internal-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828149 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf9xt\" (UniqueName: \"kubernetes.io/projected/ba464fba-e931-4f8a-be56-6b5456e1572d-kube-api-access-kf9xt\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828177 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-config-data\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.828198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-public-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: 
\"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.829148 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba464fba-e931-4f8a-be56-6b5456e1572d-logs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.836553 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-public-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.836686 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-combined-ca-bundle\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.839071 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-scripts\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.859843 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-config-data\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.860328 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba464fba-e931-4f8a-be56-6b5456e1572d-internal-tls-certs\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.866468 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf9xt\" (UniqueName: \"kubernetes.io/projected/ba464fba-e931-4f8a-be56-6b5456e1572d-kube-api-access-kf9xt\") pod \"placement-548b9747d4-bkwjt\" (UID: \"ba464fba-e931-4f8a-be56-6b5456e1572d\") " pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:53 crc kubenswrapper[4875]: I1007 08:13:53.928402 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.759444 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874458 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874508 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvt72\" (UniqueName: \"kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874585 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874721 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874742 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.874812 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts\") pod \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\" (UID: \"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72\") " Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.885695 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.887424 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts" (OuterVolumeSpecName: "scripts") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.890397 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72" (OuterVolumeSpecName: "kube-api-access-xvt72") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "kube-api-access-xvt72". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.899258 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.913862 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.925148 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data" (OuterVolumeSpecName: "config-data") pod "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" (UID: "8c8adc3b-9b0c-4fda-b20c-59d4c455fe72"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977016 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977059 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvt72\" (UniqueName: \"kubernetes.io/projected/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-kube-api-access-xvt72\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977073 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977081 4875 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977092 4875 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:55 crc kubenswrapper[4875]: I1007 08:13:55.977100 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.505424 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-48kgl" event={"ID":"8c8adc3b-9b0c-4fda-b20c-59d4c455fe72","Type":"ContainerDied","Data":"650b52735dc8c94cefd9deb100f5f603a0122ecfab0e8ba3c1e302d31b042d7e"} Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.505487 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="650b52735dc8c94cefd9deb100f5f603a0122ecfab0e8ba3c1e302d31b042d7e" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.505559 4875 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-48kgl" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.778159 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.784581 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.889342 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-754566d8d4-jzmbw"] Oct 07 08:13:56 crc kubenswrapper[4875]: E1007 08:13:56.889946 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" containerName="keystone-bootstrap" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.889976 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" containerName="keystone-bootstrap" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.890256 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" containerName="keystone-bootstrap" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.891147 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.903920 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.904261 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.904513 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.904687 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5bgfb" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.904840 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.905895 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 08:13:56 crc kubenswrapper[4875]: I1007 08:13:56.920421 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-754566d8d4-jzmbw"] Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002155 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-public-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002237 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-internal-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002299 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-fernet-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002323 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-config-data\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002494 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-credential-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.002925 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5hq6\" (UniqueName: \"kubernetes.io/projected/bcdbebc0-2911-4723-9a61-718037d0d1dc-kube-api-access-w5hq6\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.003094 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-combined-ca-bundle\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.003279 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-scripts\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105007 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-public-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105087 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-internal-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105133 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-fernet-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105154 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-config-data\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105184 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-credential-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105239 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5hq6\" (UniqueName: \"kubernetes.io/projected/bcdbebc0-2911-4723-9a61-718037d0d1dc-kube-api-access-w5hq6\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105264 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-combined-ca-bundle\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.105293 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-scripts\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.117049 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-internal-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.117738 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-public-tls-certs\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.118140 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-scripts\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.125561 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-fernet-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.129038 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-config-data\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " 
pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.133638 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-combined-ca-bundle\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.150295 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5hq6\" (UniqueName: \"kubernetes.io/projected/bcdbebc0-2911-4723-9a61-718037d0d1dc-kube-api-access-w5hq6\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.175818 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcdbebc0-2911-4723-9a61-718037d0d1dc-credential-keys\") pod \"keystone-754566d8d4-jzmbw\" (UID: \"bcdbebc0-2911-4723-9a61-718037d0d1dc\") " pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.214569 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.523082 4875 generic.go:334] "Generic (PLEG): container finished" podID="7ede5949-4681-4699-befa-f13a645d1f4c" containerID="00e1f17dc0358d3e11a2a727768e6a2e01b317e6de939a3a7da46419b96fc8ce" exitCode=0 Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.523166 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-km8j5" event={"ID":"7ede5949-4681-4699-befa-f13a645d1f4c","Type":"ContainerDied","Data":"00e1f17dc0358d3e11a2a727768e6a2e01b317e6de939a3a7da46419b96fc8ce"} Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.663803 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.663906 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.716948 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:57 crc kubenswrapper[4875]: I1007 08:13:57.733119 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.538220 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.538616 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.587269 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.591552 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.665176 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-64854f4c8-d67s8" 
Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.665221 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.889232 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-754566d8d4-jzmbw"] Oct 07 08:13:58 crc kubenswrapper[4875]: I1007 08:13:58.956808 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-548b9747d4-bkwjt"] Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.006766 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.157711 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle\") pod \"7ede5949-4681-4699-befa-f13a645d1f4c\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.157830 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data\") pod \"7ede5949-4681-4699-befa-f13a645d1f4c\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.158008 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntpkv\" (UniqueName: \"kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv\") pod \"7ede5949-4681-4699-befa-f13a645d1f4c\" (UID: \"7ede5949-4681-4699-befa-f13a645d1f4c\") " Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.165029 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv" (OuterVolumeSpecName: "kube-api-access-ntpkv") pod "7ede5949-4681-4699-befa-f13a645d1f4c" (UID: "7ede5949-4681-4699-befa-f13a645d1f4c"). InnerVolumeSpecName "kube-api-access-ntpkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.165728 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7ede5949-4681-4699-befa-f13a645d1f4c" (UID: "7ede5949-4681-4699-befa-f13a645d1f4c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.207678 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ede5949-4681-4699-befa-f13a645d1f4c" (UID: "7ede5949-4681-4699-befa-f13a645d1f4c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.260483 4875 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.260516 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntpkv\" (UniqueName: \"kubernetes.io/projected/7ede5949-4681-4699-befa-f13a645d1f4c-kube-api-access-ntpkv\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.260527 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ede5949-4681-4699-befa-f13a645d1f4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.568738 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-548b9747d4-bkwjt" event={"ID":"ba464fba-e931-4f8a-be56-6b5456e1572d","Type":"ContainerStarted","Data":"fb5f01d79fe04f98d73e37404d02e8b43be4a71c4f173097f6d461f79c40ac5b"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.568797 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-548b9747d4-bkwjt" event={"ID":"ba464fba-e931-4f8a-be56-6b5456e1572d","Type":"ContainerStarted","Data":"48789f061a0b60317e6f23625a91c0c6504811952e8d4d5ad578ae64f8ba402e"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.568812 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-548b9747d4-bkwjt" event={"ID":"ba464fba-e931-4f8a-be56-6b5456e1572d","Type":"ContainerStarted","Data":"0c4a47404781226dfa791a1d46e5a107a6c357cff970dad698d04bd4ea6418a1"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.570070 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.570102 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.587752 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-754566d8d4-jzmbw" event={"ID":"bcdbebc0-2911-4723-9a61-718037d0d1dc","Type":"ContainerStarted","Data":"6d12911e5f50bab9fb42193cc149566fb5079e544b0beb4813db687f9a8a47b7"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.587789 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-754566d8d4-jzmbw" event={"ID":"bcdbebc0-2911-4723-9a61-718037d0d1dc","Type":"ContainerStarted","Data":"95e9d464fc1db7e47e21c70c234bc7862763a7761d222184b03185127132a1d7"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.588635 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.609570 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-548b9747d4-bkwjt" podStartSLOduration=6.609552089 podStartE2EDuration="6.609552089s" podCreationTimestamp="2025-10-07 08:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:59.597487563 +0000 UTC m=+1064.557258116" watchObservedRunningTime="2025-10-07 08:13:59.609552089 +0000 UTC m=+1064.569322632" Oct 07 08:13:59 
crc kubenswrapper[4875]: I1007 08:13:59.611085 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-km8j5" event={"ID":"7ede5949-4681-4699-befa-f13a645d1f4c","Type":"ContainerDied","Data":"95d902de709c12ce4314a9047574e5c867a8306aad0a734d0c6b577b6f28a714"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.611122 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95d902de709c12ce4314a9047574e5c867a8306aad0a734d0c6b577b6f28a714" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.611178 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-km8j5" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.616945 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerStarted","Data":"39da007b9e9dd5e515680790376915507daa320030eab5508889ef386920526b"} Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.656096 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-754566d8d4-jzmbw" podStartSLOduration=3.656034761 podStartE2EDuration="3.656034761s" podCreationTimestamp="2025-10-07 08:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:13:59.642681814 +0000 UTC m=+1064.602452357" watchObservedRunningTime="2025-10-07 08:13:59.656034761 +0000 UTC m=+1064.615805314" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.917800 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-576fbf56f5-4mtlh"] Oct 07 08:13:59 crc kubenswrapper[4875]: E1007 08:13:59.922155 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" containerName="barbican-db-sync" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.922229 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" containerName="barbican-db-sync" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.929041 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" containerName="barbican-db-sync" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.931409 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.946474 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.946553 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ls5zf" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.947027 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.948083 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-576fbf56f5-4mtlh"] Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.965806 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5dbb8f897d-wgrt2"] Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.969060 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.976263 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.982564 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.984579 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.985155 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data-custom\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.985260 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-logs\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.997924 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2pbr\" (UniqueName: \"kubernetes.io/projected/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-kube-api-access-c2pbr\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.998150 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-combined-ca-bundle\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:13:59 crc kubenswrapper[4875]: I1007 08:13:59.998396 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.002989 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5dbb8f897d-wgrt2"] Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.013032 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.047243 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.049248 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.055335 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.083367 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.099855 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-logs\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.099931 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.099976 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-logs\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100028 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data-custom\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100083 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100105 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100127 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2pbr\" (UniqueName: \"kubernetes.io/projected/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-kube-api-access-c2pbr\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100148 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100175 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100211 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chpzc\" (UniqueName: \"kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100239 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100267 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100295 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-combined-ca-bundle\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100329 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100356 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l687\" (UniqueName: \"kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100385 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc 
kubenswrapper[4875]: I1007 08:14:00.100420 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100462 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100484 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt2vq\" (UniqueName: \"kubernetes.io/projected/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-kube-api-access-bt2vq\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100523 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data-custom\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100524 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-logs\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.100555 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.111761 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-combined-ca-bundle\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.112869 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.114768 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-config-data-custom\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " 
pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.120331 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2pbr\" (UniqueName: \"kubernetes.io/projected/3021a32f-0a9f-4dea-8da2-2ae1df754ccc-kube-api-access-c2pbr\") pod \"barbican-worker-576fbf56f5-4mtlh\" (UID: \"3021a32f-0a9f-4dea-8da2-2ae1df754ccc\") " pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202754 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202799 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202834 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202862 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l687\" (UniqueName: \"kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202896 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202921 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202937 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt2vq\" (UniqueName: \"kubernetes.io/projected/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-kube-api-access-bt2vq\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.202974 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: 
\"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203010 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203041 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-logs\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203061 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data-custom\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203088 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203105 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203126 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203146 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203173 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chpzc\" (UniqueName: \"kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.203845 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: 
\"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.204408 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-logs\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.204801 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.204916 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.205337 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.205729 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.206146 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.210388 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.211348 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data-custom\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.213007 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " 
pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.220270 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-config-data\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.224778 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.225191 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.226655 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l687\" (UniqueName: \"kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687\") pod \"dnsmasq-dns-7c67bffd47-7tr7b\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.227245 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt2vq\" (UniqueName: \"kubernetes.io/projected/5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e-kube-api-access-bt2vq\") pod \"barbican-keystone-listener-5dbb8f897d-wgrt2\" (UID: \"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e\") " pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.229895 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chpzc\" (UniqueName: \"kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc\") pod \"barbican-api-6f66dddcf4-7856h\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.298784 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-576fbf56f5-4mtlh" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.342700 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.356127 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.388204 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.625997 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.626329 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 08:14:00 crc kubenswrapper[4875]: I1007 08:14:00.947988 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-576fbf56f5-4mtlh"] Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.130415 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:14:01 crc kubenswrapper[4875]: W1007 08:14:01.158997 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71b8b921_955b_48c7_ad48_d07a99b590f1.slice/crio-cbc1a8731a7709d525a71e8e3da1cb4b94962fc2a4fc2b09d08fe4a5f83ba1a0 WatchSource:0}: Error finding container cbc1a8731a7709d525a71e8e3da1cb4b94962fc2a4fc2b09d08fe4a5f83ba1a0: Status 404 returned error can't find the container with id cbc1a8731a7709d525a71e8e3da1cb4b94962fc2a4fc2b09d08fe4a5f83ba1a0 Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.223319 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.223390 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.228177 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5dbb8f897d-wgrt2"] Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.241606 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.649150 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerStarted","Data":"27e6e5e3418e4c1c82efb5edec9404b5c1cc07495d0434e0cb98e75b47d0608a"} Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.662718 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" event={"ID":"71b8b921-955b-48c7-ad48-d07a99b590f1","Type":"ContainerStarted","Data":"cbc1a8731a7709d525a71e8e3da1cb4b94962fc2a4fc2b09d08fe4a5f83ba1a0"} Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.665673 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" event={"ID":"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e","Type":"ContainerStarted","Data":"fafc09d0c9854f87e8cd30b6e422e5ef3fe2f4d2c29df47f2d44a38f9636977f"} Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.683146 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-576fbf56f5-4mtlh" 
event={"ID":"3021a32f-0a9f-4dea-8da2-2ae1df754ccc","Type":"ContainerStarted","Data":"620cfbb9a02ba187efde9f33a75a053ea317c9498a47f97b14fb50b7b25a82ad"} Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.935394 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.936545 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 08:14:01 crc kubenswrapper[4875]: I1007 08:14:01.942292 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.705114 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerStarted","Data":"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2"} Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.705462 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerStarted","Data":"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035"} Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.705735 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.705788 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.716407 4875 generic.go:334] "Generic (PLEG): container finished" podID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerID="c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024" exitCode=0 Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.716671 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" event={"ID":"71b8b921-955b-48c7-ad48-d07a99b590f1","Type":"ContainerDied","Data":"c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024"} Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.751654 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8lc4s" event={"ID":"08b9d931-b59f-4e6c-9081-c8b918d37ba8","Type":"ContainerStarted","Data":"8c2adc77b7833010ad017069954d4a16c3ed404af77b83544180902cb9d6f368"} Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.755856 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6f66dddcf4-7856h" podStartSLOduration=3.755837669 podStartE2EDuration="3.755837669s" podCreationTimestamp="2025-10-07 08:13:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:02.731840792 +0000 UTC m=+1067.691611335" watchObservedRunningTime="2025-10-07 08:14:02.755837669 +0000 UTC m=+1067.715608212" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.809482 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-8lc4s" podStartSLOduration=3.466977378 podStartE2EDuration="40.809449766s" podCreationTimestamp="2025-10-07 08:13:22 +0000 UTC" firstStartedPulling="2025-10-07 08:13:23.834586496 +0000 UTC m=+1028.794357039" lastFinishedPulling="2025-10-07 08:14:01.177058884 +0000 UTC m=+1066.136829427" 
observedRunningTime="2025-10-07 08:14:02.803437009 +0000 UTC m=+1067.763207552" watchObservedRunningTime="2025-10-07 08:14:02.809449766 +0000 UTC m=+1067.769220329" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.953011 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-d454b8786-74tns"] Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.955320 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.958279 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.958652 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 07 08:14:02 crc kubenswrapper[4875]: I1007 08:14:02.964825 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-d454b8786-74tns"] Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104081 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-public-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104171 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsnq4\" (UniqueName: \"kubernetes.io/projected/127bbfa5-9171-435f-99d2-069db85c4d67-kube-api-access-xsnq4\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104197 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-internal-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104269 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-combined-ca-bundle\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104295 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data-custom\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104328 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.104370 4875 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127bbfa5-9171-435f-99d2-069db85c4d67-logs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.208809 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127bbfa5-9171-435f-99d2-069db85c4d67-logs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209113 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-public-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209342 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsnq4\" (UniqueName: \"kubernetes.io/projected/127bbfa5-9171-435f-99d2-069db85c4d67-kube-api-access-xsnq4\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209412 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-internal-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209548 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-combined-ca-bundle\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209629 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data-custom\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209636 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/127bbfa5-9171-435f-99d2-069db85c4d67-logs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.209821 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.217073 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-internal-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.219243 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-public-tls-certs\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.220244 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.220919 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-combined-ca-bundle\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.225151 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/127bbfa5-9171-435f-99d2-069db85c4d67-config-data-custom\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.230028 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsnq4\" (UniqueName: \"kubernetes.io/projected/127bbfa5-9171-435f-99d2-069db85c4d67-kube-api-access-xsnq4\") pod \"barbican-api-d454b8786-74tns\" (UID: \"127bbfa5-9171-435f-99d2-069db85c4d67\") " pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:03 crc kubenswrapper[4875]: I1007 08:14:03.289728 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.831364 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" event={"ID":"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e","Type":"ContainerStarted","Data":"ae6cdee333c7451a89fc542780af7b88f78242f0bc0849e400e3bffc748e711f"} Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.841026 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-576fbf56f5-4mtlh" event={"ID":"3021a32f-0a9f-4dea-8da2-2ae1df754ccc","Type":"ContainerStarted","Data":"663fdf05de34300e781987a7e21f9f53f0a18a94c88615dd4b2f4f0a98d21125"} Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.844399 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" event={"ID":"71b8b921-955b-48c7-ad48-d07a99b590f1","Type":"ContainerStarted","Data":"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44"} Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.846080 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.874138 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" podStartSLOduration=5.874115659 podStartE2EDuration="5.874115659s" podCreationTimestamp="2025-10-07 08:13:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:04.867654998 +0000 UTC m=+1069.827425541" watchObservedRunningTime="2025-10-07 08:14:04.874115659 +0000 UTC m=+1069.833886202" Oct 07 08:14:04 crc kubenswrapper[4875]: I1007 08:14:04.944508 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-d454b8786-74tns"] Oct 07 08:14:04 crc kubenswrapper[4875]: W1007 08:14:04.979402 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod127bbfa5_9171_435f_99d2_069db85c4d67.slice/crio-f696bfc3afcec9e75401ed7de551e353994ba99b6c1c7e1ea147c5f18cacba4d WatchSource:0}: Error finding container f696bfc3afcec9e75401ed7de551e353994ba99b6c1c7e1ea147c5f18cacba4d: Status 404 returned error can't find the container with id f696bfc3afcec9e75401ed7de551e353994ba99b6c1c7e1ea147c5f18cacba4d Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.858834 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" event={"ID":"5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e","Type":"ContainerStarted","Data":"abd1a7242b7c0e917c5248c022087a96932c44bd427e36b7014ad5fd75fcbe11"} Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.864868 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-576fbf56f5-4mtlh" event={"ID":"3021a32f-0a9f-4dea-8da2-2ae1df754ccc","Type":"ContainerStarted","Data":"ee7e97e88645ad7664060a40e1787e93c986415f38afe940de3b15e9bdcc65e8"} Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.867279 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-d454b8786-74tns" event={"ID":"127bbfa5-9171-435f-99d2-069db85c4d67","Type":"ContainerStarted","Data":"8d88a8c7cfc3c0bbb47b068537c786be8af5658581f52ac71415e97d89969af4"} Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.867318 4875 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/barbican-api-d454b8786-74tns" event={"ID":"127bbfa5-9171-435f-99d2-069db85c4d67","Type":"ContainerStarted","Data":"4a1688ecef648c3013d53b0ab775f75cb36145a702521220651c73f80b11e451"} Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.867329 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-d454b8786-74tns" event={"ID":"127bbfa5-9171-435f-99d2-069db85c4d67","Type":"ContainerStarted","Data":"f696bfc3afcec9e75401ed7de551e353994ba99b6c1c7e1ea147c5f18cacba4d"} Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.867461 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.867478 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.887942 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5dbb8f897d-wgrt2" podStartSLOduration=3.742470291 podStartE2EDuration="6.887912074s" podCreationTimestamp="2025-10-07 08:13:59 +0000 UTC" firstStartedPulling="2025-10-07 08:14:01.316075379 +0000 UTC m=+1066.275845922" lastFinishedPulling="2025-10-07 08:14:04.461517162 +0000 UTC m=+1069.421287705" observedRunningTime="2025-10-07 08:14:05.883120527 +0000 UTC m=+1070.842891090" watchObservedRunningTime="2025-10-07 08:14:05.887912074 +0000 UTC m=+1070.847682617" Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.925123 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-d454b8786-74tns" podStartSLOduration=3.925084442 podStartE2EDuration="3.925084442s" podCreationTimestamp="2025-10-07 08:14:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:05.909086028 +0000 UTC m=+1070.868856601" watchObservedRunningTime="2025-10-07 08:14:05.925084442 +0000 UTC m=+1070.884854995" Oct 07 08:14:05 crc kubenswrapper[4875]: I1007 08:14:05.942440 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-576fbf56f5-4mtlh" podStartSLOduration=3.514917027 podStartE2EDuration="6.94241051s" podCreationTimestamp="2025-10-07 08:13:59 +0000 UTC" firstStartedPulling="2025-10-07 08:14:01.018153239 +0000 UTC m=+1065.977923782" lastFinishedPulling="2025-10-07 08:14:04.445646722 +0000 UTC m=+1069.405417265" observedRunningTime="2025-10-07 08:14:05.926082504 +0000 UTC m=+1070.885853067" watchObservedRunningTime="2025-10-07 08:14:05.94241051 +0000 UTC m=+1070.902181053" Oct 07 08:14:08 crc kubenswrapper[4875]: I1007 08:14:08.589786 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 07 08:14:08 crc kubenswrapper[4875]: I1007 08:14:08.668119 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64854f4c8-d67s8" podUID="cfb37cba-9925-4808-9b9f-6dfd2550c15e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Oct 07 08:14:09 crc kubenswrapper[4875]: I1007 
08:14:09.913740 4875 generic.go:334] "Generic (PLEG): container finished" podID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" containerID="8c2adc77b7833010ad017069954d4a16c3ed404af77b83544180902cb9d6f368" exitCode=0 Oct 07 08:14:09 crc kubenswrapper[4875]: I1007 08:14:09.913834 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8lc4s" event={"ID":"08b9d931-b59f-4e6c-9081-c8b918d37ba8","Type":"ContainerDied","Data":"8c2adc77b7833010ad017069954d4a16c3ed404af77b83544180902cb9d6f368"} Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.358156 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.444578 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.445347 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="dnsmasq-dns" containerID="cri-o://3e9aacff9d467b59a8621d49d0e53e11257a4e6e0170bece7f549aefa5f88482" gracePeriod=10 Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.863919 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.930270 4875 generic.go:334] "Generic (PLEG): container finished" podID="927a5f2e-c935-47bb-b7b3-0efc834566ca" containerID="3c57a58bee060a2594c16b9cc01373c4132f22d841e87e265afd0d2a0eff9a1c" exitCode=0 Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.930378 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rrk4v" event={"ID":"927a5f2e-c935-47bb-b7b3-0efc834566ca","Type":"ContainerDied","Data":"3c57a58bee060a2594c16b9cc01373c4132f22d841e87e265afd0d2a0eff9a1c"} Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.955475 4875 generic.go:334] "Generic (PLEG): container finished" podID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerID="3e9aacff9d467b59a8621d49d0e53e11257a4e6e0170bece7f549aefa5f88482" exitCode=0 Oct 07 08:14:10 crc kubenswrapper[4875]: I1007 08:14:10.956365 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" event={"ID":"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765","Type":"ContainerDied","Data":"3e9aacff9d467b59a8621d49d0e53e11257a4e6e0170bece7f549aefa5f88482"} Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.680512 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.730901 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4csn\" (UniqueName: \"kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.759582 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn" (OuterVolumeSpecName: "kube-api-access-s4csn") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "kube-api-access-s4csn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.837354 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.837474 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.837571 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.837599 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.837686 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id\") pod \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\" (UID: \"08b9d931-b59f-4e6c-9081-c8b918d37ba8\") " Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.838012 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4csn\" (UniqueName: \"kubernetes.io/projected/08b9d931-b59f-4e6c-9081-c8b918d37ba8-kube-api-access-s4csn\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.838079 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.845268 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.845367 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts" (OuterVolumeSpecName: "scripts") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.898038 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.907247 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data" (OuterVolumeSpecName: "config-data") pod "08b9d931-b59f-4e6c-9081-c8b918d37ba8" (UID: "08b9d931-b59f-4e6c-9081-c8b918d37ba8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.939482 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.939522 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.939536 4875 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08b9d931-b59f-4e6c-9081-c8b918d37ba8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.939550 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.939564 4875 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/08b9d931-b59f-4e6c-9081-c8b918d37ba8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.992237 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-8lc4s" Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.992208 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8lc4s" event={"ID":"08b9d931-b59f-4e6c-9081-c8b918d37ba8","Type":"ContainerDied","Data":"c7e3c0cce439c3061ec08e44642ded727271be32b9936b9638a6471ce2568d5c"} Oct 07 08:14:11 crc kubenswrapper[4875]: I1007 08:14:11.993931 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7e3c0cce439c3061ec08e44642ded727271be32b9936b9638a6471ce2568d5c" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.252619 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:12 crc kubenswrapper[4875]: E1007 08:14:12.253197 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" containerName="cinder-db-sync" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.253218 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" containerName="cinder-db-sync" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.253405 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" containerName="cinder-db-sync" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.254531 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.265643 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qdjjh" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.265996 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.266162 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.294368 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.395100 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.427396 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.432543 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470387 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470699 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470755 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470819 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470842 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cm7p\" (UniqueName: \"kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.470862 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.530979 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.557960 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.564028 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.574711 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.575950 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576002 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576030 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576096 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576125 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576150 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576183 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576218 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576245 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv699\" (UniqueName: 
\"kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576264 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576322 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576346 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576370 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtdbv\" (UniqueName: \"kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576395 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576426 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576443 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576480 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576508 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576533 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cm7p\" (UniqueName: \"kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.576796 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.582114 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.593760 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.595390 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.600503 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.616697 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cm7p\" (UniqueName: \"kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.620862 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data\") pod \"cinder-scheduler-0\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682215 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682270 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682406 4875 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682476 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682567 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682594 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682632 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.682653 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.683542 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv699\" (UniqueName: \"kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.683667 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.683709 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtdbv\" (UniqueName: \"kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv\") 
pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.683742 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.684782 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.685608 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.686202 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.686579 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.692322 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.692741 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.701785 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.701996 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.702456 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.702622 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.706356 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.706620 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtdbv\" (UniqueName: \"kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv\") pod \"dnsmasq-dns-5cc8b5d5c5-nw28k\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.726622 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv699\" (UniqueName: \"kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699\") pod \"cinder-api-0\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.788570 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.808650 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.814158 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:12 crc kubenswrapper[4875]: I1007 08:14:12.912666 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.179531 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.188737 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192571 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config\") pod \"927a5f2e-c935-47bb-b7b3-0efc834566ca\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192631 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g2qt\" (UniqueName: \"kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192747 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfp2w\" (UniqueName: \"kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w\") pod \"927a5f2e-c935-47bb-b7b3-0efc834566ca\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192777 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192807 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192900 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle\") pod \"927a5f2e-c935-47bb-b7b3-0efc834566ca\" (UID: \"927a5f2e-c935-47bb-b7b3-0efc834566ca\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.192939 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.193040 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.193082 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc\") pod \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\" (UID: \"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765\") " Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.199962 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt" (OuterVolumeSpecName: "kube-api-access-4g2qt") pod 
"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "kube-api-access-4g2qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.224578 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w" (OuterVolumeSpecName: "kube-api-access-wfp2w") pod "927a5f2e-c935-47bb-b7b3-0efc834566ca" (UID: "927a5f2e-c935-47bb-b7b3-0efc834566ca"). InnerVolumeSpecName "kube-api-access-wfp2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.295145 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g2qt\" (UniqueName: \"kubernetes.io/projected/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-kube-api-access-4g2qt\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.295181 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfp2w\" (UniqueName: \"kubernetes.io/projected/927a5f2e-c935-47bb-b7b3-0efc834566ca-kube-api-access-wfp2w\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.326560 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.362113 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "927a5f2e-c935-47bb-b7b3-0efc834566ca" (UID: "927a5f2e-c935-47bb-b7b3-0efc834566ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.394029 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config" (OuterVolumeSpecName: "config") pod "927a5f2e-c935-47bb-b7b3-0efc834566ca" (UID: "927a5f2e-c935-47bb-b7b3-0efc834566ca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.396019 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.396116 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.396178 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927a5f2e-c935-47bb-b7b3-0efc834566ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.467066 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.477020 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.479221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.483077 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config" (OuterVolumeSpecName: "config") pod "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" (UID: "5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.498658 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.498696 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.498706 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.498715 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:13 crc kubenswrapper[4875]: I1007 08:14:13.909914 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.043213 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.079545 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rrk4v" event={"ID":"927a5f2e-c935-47bb-b7b3-0efc834566ca","Type":"ContainerDied","Data":"4a2917a7fddd306d58d4a421aac63e007ee6b22b245be6dcf4d617005d2bf62a"} Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.079955 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a2917a7fddd306d58d4a421aac63e007ee6b22b245be6dcf4d617005d2bf62a" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.079606 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rrk4v" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.090636 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" event={"ID":"5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765","Type":"ContainerDied","Data":"29d5c73a3f9edf536a1f867c1c6de2199360729a987e7872b3ab1e386a866422"} Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.090746 4875 scope.go:117] "RemoveContainer" containerID="3e9aacff9d467b59a8621d49d0e53e11257a4e6e0170bece7f549aefa5f88482" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.091014 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-l2fn2" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.127755 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerStarted","Data":"28c73eda21e08d770864214760468a696f3c63bed8bc29efe198918213e75f60"} Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.128111 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-central-agent" containerID="cri-o://63c478e4e67c6d548105ff3e8393eb7b91f350c3bc1b76640a2274b246438439" gracePeriod=30 Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.128278 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.128771 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="proxy-httpd" containerID="cri-o://28c73eda21e08d770864214760468a696f3c63bed8bc29efe198918213e75f60" gracePeriod=30 Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.128835 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="sg-core" containerID="cri-o://39da007b9e9dd5e515680790376915507daa320030eab5508889ef386920526b" gracePeriod=30 Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.128904 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-notification-agent" containerID="cri-o://2319590f78bee1333615b5e2d800305af8de4eaf7a85d56f583ba89474a8ca66" gracePeriod=30 Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.156778 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.165479 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.891787648 podStartE2EDuration="54.165451708s" podCreationTimestamp="2025-10-07 08:13:20 +0000 UTC" firstStartedPulling="2025-10-07 08:13:22.161083078 +0000 UTC m=+1027.120853621" lastFinishedPulling="2025-10-07 08:14:13.434747138 +0000 UTC m=+1078.394517681" observedRunningTime="2025-10-07 08:14:14.150347154 +0000 UTC m=+1079.110117697" watchObservedRunningTime="2025-10-07 08:14:14.165451708 +0000 UTC m=+1079.125222251" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.204104 4875 scope.go:117] "RemoveContainer" containerID="939245df2ac0673eccd8af823818dba7474bbf36457570b9f4fa2c55c136d5ac" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.213132 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.246653 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.268078 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-l2fn2"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.435661 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:14 crc kubenswrapper[4875]: 
I1007 08:14:14.498718 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:14:14 crc kubenswrapper[4875]: E1007 08:14:14.499313 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="init" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.499342 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="init" Oct 07 08:14:14 crc kubenswrapper[4875]: E1007 08:14:14.499371 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="dnsmasq-dns" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.499377 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="dnsmasq-dns" Oct 07 08:14:14 crc kubenswrapper[4875]: E1007 08:14:14.499397 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927a5f2e-c935-47bb-b7b3-0efc834566ca" containerName="neutron-db-sync" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.499403 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="927a5f2e-c935-47bb-b7b3-0efc834566ca" containerName="neutron-db-sync" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.499594 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" containerName="dnsmasq-dns" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.499622 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="927a5f2e-c935-47bb-b7b3-0efc834566ca" containerName="neutron-db-sync" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.500811 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.537006 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.560543 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdcwn\" (UniqueName: \"kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.562437 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.562683 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.562789 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.563330 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.563455 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.603552 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.605450 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.610353 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.610593 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.610723 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-sfkpt" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.611388 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.657468 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.679124 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682201 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682277 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682325 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682351 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682411 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682428 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682507 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdcwn\" (UniqueName: 
\"kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682536 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jdc8\" (UniqueName: \"kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682578 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682636 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.682661 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.686576 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.687440 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.688086 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.688909 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.693970 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.733590 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdcwn\" (UniqueName: \"kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn\") pod \"dnsmasq-dns-6578955fd5-6k89r\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.784169 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jdc8\" (UniqueName: \"kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.784282 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.784304 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.784384 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.784405 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.807124 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.811940 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.812723 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs\") pod \"neutron-7877fc45f6-htlwc\" (UID: 
\"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.815017 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.839725 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jdc8\" (UniqueName: \"kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8\") pod \"neutron-7877fc45f6-htlwc\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.953762 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:14 crc kubenswrapper[4875]: I1007 08:14:14.985531 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.153098 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerStarted","Data":"103bdc5bcec2d24e42bc3cd01210b37f931c0b161f105d4f3f6af2bfc15dba6a"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.165845 4875 generic.go:334] "Generic (PLEG): container finished" podID="7f159cb5-e32e-4e8d-acac-0ec48e2ab021" containerID="4826944efac2f6f4d2f7e0863faf272dd318eae91e1cff38e1e867ff37e136d0" exitCode=0 Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.165987 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" event={"ID":"7f159cb5-e32e-4e8d-acac-0ec48e2ab021","Type":"ContainerDied","Data":"4826944efac2f6f4d2f7e0863faf272dd318eae91e1cff38e1e867ff37e136d0"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.166019 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" event={"ID":"7f159cb5-e32e-4e8d-acac-0ec48e2ab021","Type":"ContainerStarted","Data":"512b151f52d69064f3c08408d8a222a435f44539aa736844cfb68f14aa76760c"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.176729 4875 generic.go:334] "Generic (PLEG): container finished" podID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerID="28c73eda21e08d770864214760468a696f3c63bed8bc29efe198918213e75f60" exitCode=0 Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.176758 4875 generic.go:334] "Generic (PLEG): container finished" podID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerID="39da007b9e9dd5e515680790376915507daa320030eab5508889ef386920526b" exitCode=2 Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.176796 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerDied","Data":"28c73eda21e08d770864214760468a696f3c63bed8bc29efe198918213e75f60"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.176821 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerDied","Data":"39da007b9e9dd5e515680790376915507daa320030eab5508889ef386920526b"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 
08:14:15.177993 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerStarted","Data":"a0eb5a07d2b2a3435371e439ebb1990a5f617da338068acae36b90b893f325f7"} Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.754671 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765" path="/var/lib/kubelet/pods/5b0d35a4-cfa4-4c0a-8ca2-4db85aefc765/volumes" Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.813129 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.899577 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:14:15 crc kubenswrapper[4875]: I1007 08:14:15.910633 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:15 crc kubenswrapper[4875]: W1007 08:14:15.915740 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb243a457_ccd1_4c3d_b798_9924c6dfcf45.slice/crio-7ae4c7c744128a5e7f889669c93a7c0b54e55194d559793a03e441b2cc68abed WatchSource:0}: Error finding container 7ae4c7c744128a5e7f889669c93a7c0b54e55194d559793a03e441b2cc68abed: Status 404 returned error can't find the container with id 7ae4c7c744128a5e7f889669c93a7c0b54e55194d559793a03e441b2cc68abed Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.035381 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.036122 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.036188 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.036373 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.036498 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.036572 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtdbv\" (UniqueName: 
\"kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv\") pod \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\" (UID: \"7f159cb5-e32e-4e8d-acac-0ec48e2ab021\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.049118 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv" (OuterVolumeSpecName: "kube-api-access-dtdbv") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "kube-api-access-dtdbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.070902 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.080494 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.088954 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.131463 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.146426 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.146459 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.146468 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.146477 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.146492 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtdbv\" (UniqueName: \"kubernetes.io/projected/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-kube-api-access-dtdbv\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.172922 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config" (OuterVolumeSpecName: "config") pod "7f159cb5-e32e-4e8d-acac-0ec48e2ab021" (UID: "7f159cb5-e32e-4e8d-acac-0ec48e2ab021"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.204439 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerStarted","Data":"1551e682f2409479b3b6a1be5bdb3aa79810776167e8f0e81ceae50848229b5a"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.211039 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerStarted","Data":"757bca6721457bc333e537841d42024e97076d2b285a9bbd4c0162a3eee80b9d"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.225642 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" event={"ID":"7f159cb5-e32e-4e8d-acac-0ec48e2ab021","Type":"ContainerDied","Data":"512b151f52d69064f3c08408d8a222a435f44539aa736844cfb68f14aa76760c"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.225698 4875 scope.go:117] "RemoveContainer" containerID="4826944efac2f6f4d2f7e0863faf272dd318eae91e1cff38e1e867ff37e136d0" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.225916 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-nw28k" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.243151 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" event={"ID":"b243a457-ccd1-4c3d-b798-9924c6dfcf45","Type":"ContainerStarted","Data":"7ae4c7c744128a5e7f889669c93a7c0b54e55194d559793a03e441b2cc68abed"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.253263 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f159cb5-e32e-4e8d-acac-0ec48e2ab021-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.254323 4875 generic.go:334] "Generic (PLEG): container finished" podID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerID="2319590f78bee1333615b5e2d800305af8de4eaf7a85d56f583ba89474a8ca66" exitCode=0 Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.254363 4875 generic.go:334] "Generic (PLEG): container finished" podID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerID="63c478e4e67c6d548105ff3e8393eb7b91f350c3bc1b76640a2274b246438439" exitCode=0 Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.254391 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerDied","Data":"2319590f78bee1333615b5e2d800305af8de4eaf7a85d56f583ba89474a8ca66"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.254422 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerDied","Data":"63c478e4e67c6d548105ff3e8393eb7b91f350c3bc1b76640a2274b246438439"} Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.318964 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.327578 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-nw28k"] Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.505363 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.563802 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.563868 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.563990 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd876\" (UniqueName: \"kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.564080 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.564127 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.564258 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.564371 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle\") pod \"0149ac72-d21f-45a8-a322-a97d03c1d85a\" (UID: \"0149ac72-d21f-45a8-a322-a97d03c1d85a\") " Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.569868 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.571972 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.592090 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts" (OuterVolumeSpecName: "scripts") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.644398 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876" (OuterVolumeSpecName: "kube-api-access-sd876") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "kube-api-access-sd876". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.667351 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd876\" (UniqueName: \"kubernetes.io/projected/0149ac72-d21f-45a8-a322-a97d03c1d85a-kube-api-access-sd876\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.667692 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.667928 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0149ac72-d21f-45a8-a322-a97d03c1d85a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.668162 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.761960 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.770018 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.804179 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.878417 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.914067 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data" (OuterVolumeSpecName: "config-data") pod "0149ac72-d21f-45a8-a322-a97d03c1d85a" (UID: "0149ac72-d21f-45a8-a322-a97d03c1d85a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:16 crc kubenswrapper[4875]: I1007 08:14:16.981280 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0149ac72-d21f-45a8-a322-a97d03c1d85a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.046103 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.209164 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-d454b8786-74tns" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.272459 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.272692 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f66dddcf4-7856h" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api-log" containerID="cri-o://b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035" gracePeriod=30 Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.273167 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f66dddcf4-7856h" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api" containerID="cri-o://ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2" gracePeriod=30 Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.429232 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0149ac72-d21f-45a8-a322-a97d03c1d85a","Type":"ContainerDied","Data":"16f61710ee12bcc24a3d18d4bb7a34abcde97473f876851db742525ccc252572"} Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.429640 4875 scope.go:117] "RemoveContainer" containerID="28c73eda21e08d770864214760468a696f3c63bed8bc29efe198918213e75f60" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.429264 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.450500 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerStarted","Data":"5c66dbd662b1ea390e464faa8f0b52756dda9d6a3717be12fb70a4ed197cd536"} Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.450666 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api-log" containerID="cri-o://757bca6721457bc333e537841d42024e97076d2b285a9bbd4c0162a3eee80b9d" gracePeriod=30 Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.450926 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.450940 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api" containerID="cri-o://5c66dbd662b1ea390e464faa8f0b52756dda9d6a3717be12fb70a4ed197cd536" gracePeriod=30 Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.481349 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerStarted","Data":"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee"} Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.481402 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerStarted","Data":"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e"} Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.482195 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.518150 4875 generic.go:334] "Generic (PLEG): container finished" podID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerID="210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb" exitCode=0 Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.518789 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" event={"ID":"b243a457-ccd1-4c3d-b798-9924c6dfcf45","Type":"ContainerDied","Data":"210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb"} Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.523633 4875 scope.go:117] "RemoveContainer" containerID="39da007b9e9dd5e515680790376915507daa320030eab5508889ef386920526b" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.531758 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.531739027 podStartE2EDuration="5.531739027s" podCreationTimestamp="2025-10-07 08:14:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:17.501249838 +0000 UTC m=+1082.461020381" watchObservedRunningTime="2025-10-07 08:14:17.531739027 +0000 UTC m=+1082.491509570" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.534949 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.622996 4875 scope.go:117] "RemoveContainer" 
containerID="2319590f78bee1333615b5e2d800305af8de4eaf7a85d56f583ba89474a8ca66" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.637902 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.657357 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7877fc45f6-htlwc" podStartSLOduration=3.6573358320000002 podStartE2EDuration="3.657335832s" podCreationTimestamp="2025-10-07 08:14:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:17.581933381 +0000 UTC m=+1082.541703934" watchObservedRunningTime="2025-10-07 08:14:17.657335832 +0000 UTC m=+1082.617106375" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.738287 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" path="/var/lib/kubelet/pods/0149ac72-d21f-45a8-a322-a97d03c1d85a/volumes" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.739011 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f159cb5-e32e-4e8d-acac-0ec48e2ab021" path="/var/lib/kubelet/pods/7f159cb5-e32e-4e8d-acac-0ec48e2ab021/volumes" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.739522 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:17 crc kubenswrapper[4875]: E1007 08:14:17.739801 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="proxy-httpd" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.739811 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="proxy-httpd" Oct 07 08:14:17 crc kubenswrapper[4875]: E1007 08:14:17.739824 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-central-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.739831 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-central-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: E1007 08:14:17.739841 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="sg-core" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.739847 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="sg-core" Oct 07 08:14:17 crc kubenswrapper[4875]: E1007 08:14:17.752997 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-notification-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753311 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-notification-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: E1007 08:14:17.753371 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f159cb5-e32e-4e8d-acac-0ec48e2ab021" containerName="init" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753378 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f159cb5-e32e-4e8d-acac-0ec48e2ab021" containerName="init" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753812 4875 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="proxy-httpd" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753845 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-notification-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753863 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f159cb5-e32e-4e8d-acac-0ec48e2ab021" containerName="init" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753895 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="sg-core" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.753905 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="0149ac72-d21f-45a8-a322-a97d03c1d85a" containerName="ceilometer-central-agent" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.755948 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.765326 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.766020 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.771105 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.870204 4875 scope.go:117] "RemoveContainer" containerID="63c478e4e67c6d548105ff3e8393eb7b91f350c3bc1b76640a2274b246438439" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.880711 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.900514 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.934470 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.934828 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pggvw\" (UniqueName: \"kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.935000 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.935240 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:17 crc kubenswrapper[4875]: I1007 08:14:17.952384 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.056867 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.056974 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.057011 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.057049 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pggvw\" (UniqueName: \"kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.057079 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.057156 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.057192 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.058464 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd\") pod \"ceilometer-0\" (UID: 
\"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.058792 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.065260 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.071436 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.071622 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.071996 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.087817 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pggvw\" (UniqueName: \"kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw\") pod \"ceilometer-0\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.206743 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.572167 4875 generic.go:334] "Generic (PLEG): container finished" podID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerID="5c66dbd662b1ea390e464faa8f0b52756dda9d6a3717be12fb70a4ed197cd536" exitCode=0 Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.572661 4875 generic.go:334] "Generic (PLEG): container finished" podID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerID="757bca6721457bc333e537841d42024e97076d2b285a9bbd4c0162a3eee80b9d" exitCode=143 Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.572735 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerDied","Data":"5c66dbd662b1ea390e464faa8f0b52756dda9d6a3717be12fb70a4ed197cd536"} Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.572764 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerDied","Data":"757bca6721457bc333e537841d42024e97076d2b285a9bbd4c0162a3eee80b9d"} Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.592145 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerStarted","Data":"5898bc02da32e377a8ac0ea1bdd26f7653e9ef37c05899016ba2fef10d88c798"} Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.601996 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f5f877689-scxcf"] Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.609398 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.617655 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.617839 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.618926 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" event={"ID":"b243a457-ccd1-4c3d-b798-9924c6dfcf45","Type":"ContainerStarted","Data":"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95"} Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.620119 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.629310 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f5f877689-scxcf"] Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.647644 4875 generic.go:334] "Generic (PLEG): container finished" podID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerID="b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035" exitCode=143 Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.647729 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerDied","Data":"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035"} Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.660074 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" 
podStartSLOduration=4.660055804 podStartE2EDuration="4.660055804s" podCreationTimestamp="2025-10-07 08:14:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:18.657263022 +0000 UTC m=+1083.617033565" watchObservedRunningTime="2025-10-07 08:14:18.660055804 +0000 UTC m=+1083.619826347" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.669910 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-combined-ca-bundle\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.669997 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-internal-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.670028 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-httpd-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.670047 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwggp\" (UniqueName: \"kubernetes.io/projected/a42b578e-6f65-40c9-94f4-2b843c305470-kube-api-access-fwggp\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.670113 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-public-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.670152 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.670208 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-ovndb-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.776981 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-combined-ca-bundle\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " 
pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777121 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-internal-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777154 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-httpd-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777190 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwggp\" (UniqueName: \"kubernetes.io/projected/a42b578e-6f65-40c9-94f4-2b843c305470-kube-api-access-fwggp\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777278 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-public-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777352 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.777472 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-ovndb-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.792323 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-ovndb-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.796266 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.797413 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-internal-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.798568 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-public-tls-certs\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " 
pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.799544 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.800350 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-httpd-config\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.803157 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a42b578e-6f65-40c9-94f4-2b843c305470-combined-ca-bundle\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.808305 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwggp\" (UniqueName: \"kubernetes.io/projected/a42b578e-6f65-40c9-94f4-2b843c305470-kube-api-access-fwggp\") pod \"neutron-5f5f877689-scxcf\" (UID: \"a42b578e-6f65-40c9-94f4-2b843c305470\") " pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:18 crc kubenswrapper[4875]: I1007 08:14:18.959411 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.079111 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.190539 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191000 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191091 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191125 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191196 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191249 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191301 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv699\" (UniqueName: \"kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191355 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle\") pod \"fe3d321f-e7cf-44de-9419-b7e40191ba52\" (UID: \"fe3d321f-e7cf-44de-9419-b7e40191ba52\") " Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.191814 4875 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fe3d321f-e7cf-44de-9419-b7e40191ba52-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.196983 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs" (OuterVolumeSpecName: "logs") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.201009 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.209057 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts" (OuterVolumeSpecName: "scripts") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.209294 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699" (OuterVolumeSpecName: "kube-api-access-bv699") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "kube-api-access-bv699". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.283976 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.290362 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data" (OuterVolumeSpecName: "config-data") pod "fe3d321f-e7cf-44de-9419-b7e40191ba52" (UID: "fe3d321f-e7cf-44de-9419-b7e40191ba52"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293067 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293101 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv699\" (UniqueName: \"kubernetes.io/projected/fe3d321f-e7cf-44de-9419-b7e40191ba52-kube-api-access-bv699\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293114 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293122 4875 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293133 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe3d321f-e7cf-44de-9419-b7e40191ba52-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.293141 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3d321f-e7cf-44de-9419-b7e40191ba52-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.442582 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f5f877689-scxcf"] Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.688011 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fe3d321f-e7cf-44de-9419-b7e40191ba52","Type":"ContainerDied","Data":"a0eb5a07d2b2a3435371e439ebb1990a5f617da338068acae36b90b893f325f7"} Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.688072 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.688090 4875 scope.go:117] "RemoveContainer" containerID="5c66dbd662b1ea390e464faa8f0b52756dda9d6a3717be12fb70a4ed197cd536" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.695173 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerStarted","Data":"d7dba3dafa292528bbf375d40c9ee3c4080bd19a6a9e91c346ff3278dc62eebb"} Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.712760 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5f877689-scxcf" event={"ID":"a42b578e-6f65-40c9-94f4-2b843c305470","Type":"ContainerStarted","Data":"e45cf2e299c80747d67d4fd1f938e686219ca38d06c48ba9ecb5b4cbd7b8e4c8"} Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.712836 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerStarted","Data":"1f85e3fefaf6f6ea41101cd9a4723ea34065be2da1dca2044c9c9d14eef64dff"} Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.712853 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerStarted","Data":"36b49f11468d343a739eaca8aa7c9dd5e134e49467862e07684cd659cedcc05e"} Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.719834 4875 scope.go:117] "RemoveContainer" containerID="757bca6721457bc333e537841d42024e97076d2b285a9bbd4c0162a3eee80b9d" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.725948 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.304226382 podStartE2EDuration="7.72582244s" podCreationTimestamp="2025-10-07 08:14:12 +0000 UTC" firstStartedPulling="2025-10-07 08:14:14.115392598 +0000 UTC m=+1079.075163141" lastFinishedPulling="2025-10-07 08:14:16.536988656 +0000 UTC m=+1081.496759199" observedRunningTime="2025-10-07 08:14:19.72093131 +0000 UTC m=+1084.680701863" watchObservedRunningTime="2025-10-07 08:14:19.72582244 +0000 UTC m=+1084.685592983" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.756959 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.765602 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.771983 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:19 crc kubenswrapper[4875]: E1007 08:14:19.772438 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.772458 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api" Oct 07 08:14:19 crc kubenswrapper[4875]: E1007 08:14:19.772500 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api-log" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.772509 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api-log" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.772729 4875 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.772753 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" containerName="cinder-api-log" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.773747 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.781180 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.781639 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.781812 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.798672 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909191 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909310 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a93ed97-4ce8-45f0-b81c-52e3613ea189-logs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909343 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v98s\" (UniqueName: \"kubernetes.io/projected/3a93ed97-4ce8-45f0-b81c-52e3613ea189-kube-api-access-6v98s\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909418 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909464 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data-custom\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909505 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909531 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-scripts\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909571 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:19 crc kubenswrapper[4875]: I1007 08:14:19.909597 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a93ed97-4ce8-45f0-b81c-52e3613ea189-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011003 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011092 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a93ed97-4ce8-45f0-b81c-52e3613ea189-logs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011121 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v98s\" (UniqueName: \"kubernetes.io/projected/3a93ed97-4ce8-45f0-b81c-52e3613ea189-kube-api-access-6v98s\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011169 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011204 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data-custom\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011239 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011269 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-scripts\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011321 4875 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011349 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a93ed97-4ce8-45f0-b81c-52e3613ea189-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011444 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a93ed97-4ce8-45f0-b81c-52e3613ea189-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.011806 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a93ed97-4ce8-45f0-b81c-52e3613ea189-logs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.020239 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-scripts\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.020754 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.024728 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.026181 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.026734 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-config-data-custom\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.032586 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a93ed97-4ce8-45f0-b81c-52e3613ea189-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.037102 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v98s\" (UniqueName: 
\"kubernetes.io/projected/3a93ed97-4ce8-45f0-b81c-52e3613ea189-kube-api-access-6v98s\") pod \"cinder-api-0\" (UID: \"3a93ed97-4ce8-45f0-b81c-52e3613ea189\") " pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.097215 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.690562 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 08:14:20 crc kubenswrapper[4875]: W1007 08:14:20.694380 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a93ed97_4ce8_45f0_b81c_52e3613ea189.slice/crio-540a264cb930cc611be07a48742f579d5d865c961a2836b7aedf2d35ee1390c5 WatchSource:0}: Error finding container 540a264cb930cc611be07a48742f579d5d865c961a2836b7aedf2d35ee1390c5: Status 404 returned error can't find the container with id 540a264cb930cc611be07a48742f579d5d865c961a2836b7aedf2d35ee1390c5 Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.714659 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3a93ed97-4ce8-45f0-b81c-52e3613ea189","Type":"ContainerStarted","Data":"540a264cb930cc611be07a48742f579d5d865c961a2836b7aedf2d35ee1390c5"} Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.716365 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5f877689-scxcf" event={"ID":"a42b578e-6f65-40c9-94f4-2b843c305470","Type":"ContainerStarted","Data":"c86915ebf5b4f8f97cdd9f76abfcd985f0a5269e750606abe0f5de90a3a52d72"} Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.716391 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5f877689-scxcf" event={"ID":"a42b578e-6f65-40c9-94f4-2b843c305470","Type":"ContainerStarted","Data":"a48f88d10d896b13b62f3ef0b87c833a6f8c3cefbd56a2de8c427fc7d2aad9a1"} Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.717602 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.728707 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerStarted","Data":"f78036b224835188f862fe10d2995d3a4ffe5f69491a003ff044dbf55ee172c8"} Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.753733 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5f5f877689-scxcf" podStartSLOduration=2.753718087 podStartE2EDuration="2.753718087s" podCreationTimestamp="2025-10-07 08:14:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:20.750653637 +0000 UTC m=+1085.710424180" watchObservedRunningTime="2025-10-07 08:14:20.753718087 +0000 UTC m=+1085.713488630" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.868038 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6f66dddcf4-7856h" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:46940->10.217.0.158:9311: read: connection reset by peer" Oct 07 08:14:20 crc kubenswrapper[4875]: I1007 08:14:20.868197 4875 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/barbican-api-6f66dddcf4-7856h" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:46946->10.217.0.158:9311: read: connection reset by peer" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.262316 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.358685 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle\") pod \"d86ba6fc-0914-42b4-a080-94a2173841bf\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.358749 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom\") pod \"d86ba6fc-0914-42b4-a080-94a2173841bf\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.358773 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data\") pod \"d86ba6fc-0914-42b4-a080-94a2173841bf\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.358819 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs\") pod \"d86ba6fc-0914-42b4-a080-94a2173841bf\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.358981 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chpzc\" (UniqueName: \"kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc\") pod \"d86ba6fc-0914-42b4-a080-94a2173841bf\" (UID: \"d86ba6fc-0914-42b4-a080-94a2173841bf\") " Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.361259 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs" (OuterVolumeSpecName: "logs") pod "d86ba6fc-0914-42b4-a080-94a2173841bf" (UID: "d86ba6fc-0914-42b4-a080-94a2173841bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.364527 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d86ba6fc-0914-42b4-a080-94a2173841bf" (UID: "d86ba6fc-0914-42b4-a080-94a2173841bf"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.367125 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc" (OuterVolumeSpecName: "kube-api-access-chpzc") pod "d86ba6fc-0914-42b4-a080-94a2173841bf" (UID: "d86ba6fc-0914-42b4-a080-94a2173841bf"). InnerVolumeSpecName "kube-api-access-chpzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.389567 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d86ba6fc-0914-42b4-a080-94a2173841bf" (UID: "d86ba6fc-0914-42b4-a080-94a2173841bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.427465 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data" (OuterVolumeSpecName: "config-data") pod "d86ba6fc-0914-42b4-a080-94a2173841bf" (UID: "d86ba6fc-0914-42b4-a080-94a2173841bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.461777 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chpzc\" (UniqueName: \"kubernetes.io/projected/d86ba6fc-0914-42b4-a080-94a2173841bf-kube-api-access-chpzc\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.461826 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.461836 4875 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.461845 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86ba6fc-0914-42b4-a080-94a2173841bf-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.461856 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d86ba6fc-0914-42b4-a080-94a2173841bf-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.623255 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.695128 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.719553 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3d321f-e7cf-44de-9419-b7e40191ba52" path="/var/lib/kubelet/pods/fe3d321f-e7cf-44de-9419-b7e40191ba52/volumes" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.751147 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerStarted","Data":"fe45f768b02a79b62860547256922c9c8528953ecddd76b1b453c15e676bd542"} Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.753024 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3a93ed97-4ce8-45f0-b81c-52e3613ea189","Type":"ContainerStarted","Data":"d4d35730bb425fb8ed1a5322d1f6be51e37c592a057727f0272efaf950e69c9d"} Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.755351 4875 generic.go:334] 
"Generic (PLEG): container finished" podID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerID="ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2" exitCode=0 Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.756506 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f66dddcf4-7856h" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.756552 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerDied","Data":"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2"} Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.756578 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f66dddcf4-7856h" event={"ID":"d86ba6fc-0914-42b4-a080-94a2173841bf","Type":"ContainerDied","Data":"27e6e5e3418e4c1c82efb5edec9404b5c1cc07495d0434e0cb98e75b47d0608a"} Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.756595 4875 scope.go:117] "RemoveContainer" containerID="ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.787368 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.797596 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6f66dddcf4-7856h"] Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.808113 4875 scope.go:117] "RemoveContainer" containerID="b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.851136 4875 scope.go:117] "RemoveContainer" containerID="ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2" Oct 07 08:14:21 crc kubenswrapper[4875]: E1007 08:14:21.851744 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2\": container with ID starting with ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2 not found: ID does not exist" containerID="ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.851782 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2"} err="failed to get container status \"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2\": rpc error: code = NotFound desc = could not find container \"ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2\": container with ID starting with ad026997af1e389ced04c4015430116b8376e3f10ca6b554005afca59ac16da2 not found: ID does not exist" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.851804 4875 scope.go:117] "RemoveContainer" containerID="b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035" Oct 07 08:14:21 crc kubenswrapper[4875]: E1007 08:14:21.854142 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035\": container with ID starting with b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035 not found: ID does not exist" 
containerID="b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035" Oct 07 08:14:21 crc kubenswrapper[4875]: I1007 08:14:21.854193 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035"} err="failed to get container status \"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035\": rpc error: code = NotFound desc = could not find container \"b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035\": container with ID starting with b9a61dd90d3fd8b7295bc98fd42493a7bd87aa633a941b86484df5f414ea2035 not found: ID does not exist" Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.772608 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerStarted","Data":"1bb12e888b0e23138aba16b6f95f527c73a5981077075641de7e2ca210fe1511"} Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.773350 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.777422 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"3a93ed97-4ce8-45f0-b81c-52e3613ea189","Type":"ContainerStarted","Data":"78b3a10775a7a5c2df899ba00f4207264e4302ad92271c7f59301a2ebccd335b"} Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.777459 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.804332 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.159504478 podStartE2EDuration="5.804311361s" podCreationTimestamp="2025-10-07 08:14:17 +0000 UTC" firstStartedPulling="2025-10-07 08:14:18.815043052 +0000 UTC m=+1083.774813595" lastFinishedPulling="2025-10-07 08:14:22.459849945 +0000 UTC m=+1087.419620478" observedRunningTime="2025-10-07 08:14:22.800358481 +0000 UTC m=+1087.760129034" watchObservedRunningTime="2025-10-07 08:14:22.804311361 +0000 UTC m=+1087.764081904" Oct 07 08:14:22 crc kubenswrapper[4875]: I1007 08:14:22.913893 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.124817 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.145209 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.145187068 podStartE2EDuration="4.145187068s" podCreationTimestamp="2025-10-07 08:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:22.832391971 +0000 UTC m=+1087.792162524" watchObservedRunningTime="2025-10-07 08:14:23.145187068 +0000 UTC m=+1088.104957611" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.371567 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.576140 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-64854f4c8-d67s8" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.634224 4875 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.716451 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" path="/var/lib/kubelet/pods/d86ba6fc-0914-42b4-a080-94a2173841bf/volumes" Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.787179 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon-log" containerID="cri-o://eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601" gracePeriod=30 Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.787549 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" containerID="cri-o://84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1" gracePeriod=30 Oct 07 08:14:23 crc kubenswrapper[4875]: I1007 08:14:23.828801 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:24 crc kubenswrapper[4875]: I1007 08:14:24.796476 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="cinder-scheduler" containerID="cri-o://5898bc02da32e377a8ac0ea1bdd26f7653e9ef37c05899016ba2fef10d88c798" gracePeriod=30 Oct 07 08:14:24 crc kubenswrapper[4875]: I1007 08:14:24.796557 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="probe" containerID="cri-o://d7dba3dafa292528bbf375d40c9ee3c4080bd19a6a9e91c346ff3278dc62eebb" gracePeriod=30 Oct 07 08:14:24 crc kubenswrapper[4875]: I1007 08:14:24.957039 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.029163 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.029665 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="dnsmasq-dns" containerID="cri-o://94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44" gracePeriod=10 Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.069298 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.418777 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-548b9747d4-bkwjt" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.648084 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758315 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758430 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758545 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9l687\" (UniqueName: \"kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758639 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758663 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.758735 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.807134 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687" (OuterVolumeSpecName: "kube-api-access-9l687") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "kube-api-access-9l687". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.815182 4875 generic.go:334] "Generic (PLEG): container finished" podID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerID="94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44" exitCode=0 Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.815353 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" event={"ID":"71b8b921-955b-48c7-ad48-d07a99b590f1","Type":"ContainerDied","Data":"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44"} Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.815485 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" event={"ID":"71b8b921-955b-48c7-ad48-d07a99b590f1","Type":"ContainerDied","Data":"cbc1a8731a7709d525a71e8e3da1cb4b94962fc2a4fc2b09d08fe4a5f83ba1a0"} Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.815508 4875 scope.go:117] "RemoveContainer" containerID="94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.815710 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.830903 4875 generic.go:334] "Generic (PLEG): container finished" podID="df5f11ac-252a-4de8-9493-261081690646" containerID="d7dba3dafa292528bbf375d40c9ee3c4080bd19a6a9e91c346ff3278dc62eebb" exitCode=0 Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.831019 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerDied","Data":"d7dba3dafa292528bbf375d40c9ee3c4080bd19a6a9e91c346ff3278dc62eebb"} Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.846221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.848126 4875 scope.go:117] "RemoveContainer" containerID="c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.853050 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.858242 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.860112 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config" (OuterVolumeSpecName: "config") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.860459 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") pod \"71b8b921-955b-48c7-ad48-d07a99b590f1\" (UID: \"71b8b921-955b-48c7-ad48-d07a99b590f1\") " Oct 07 08:14:25 crc kubenswrapper[4875]: W1007 08:14:25.860585 4875 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/71b8b921-955b-48c7-ad48-d07a99b590f1/volumes/kubernetes.io~configmap/config Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.860606 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config" (OuterVolumeSpecName: "config") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.861064 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.861084 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.861099 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9l687\" (UniqueName: \"kubernetes.io/projected/71b8b921-955b-48c7-ad48-d07a99b590f1-kube-api-access-9l687\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.861110 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.861118 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.864908 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71b8b921-955b-48c7-ad48-d07a99b590f1" (UID: "71b8b921-955b-48c7-ad48-d07a99b590f1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.877028 4875 scope.go:117] "RemoveContainer" containerID="94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44" Oct 07 08:14:25 crc kubenswrapper[4875]: E1007 08:14:25.877597 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44\": container with ID starting with 94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44 not found: ID does not exist" containerID="94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.877645 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44"} err="failed to get container status \"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44\": rpc error: code = NotFound desc = could not find container \"94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44\": container with ID starting with 94827a03e3d7087f8329f692e36bdc52b3061482411d608047c3c29a31495d44 not found: ID does not exist" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.877679 4875 scope.go:117] "RemoveContainer" containerID="c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024" Oct 07 08:14:25 crc kubenswrapper[4875]: E1007 08:14:25.878079 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024\": container with ID starting with c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024 not found: ID does not exist" containerID="c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.878204 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024"} err="failed to get container status \"c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024\": rpc error: code = NotFound desc = could not find container \"c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024\": container with ID starting with c399163faccbf7cc6e78bc0dce5a4f6881823100835d42dd5d42551571a50024 not found: ID does not exist" Oct 07 08:14:25 crc kubenswrapper[4875]: I1007 08:14:25.963210 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71b8b921-955b-48c7-ad48-d07a99b590f1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:26 crc kubenswrapper[4875]: I1007 08:14:26.150299 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:14:26 crc kubenswrapper[4875]: I1007 08:14:26.158060 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c67bffd47-7tr7b"] Oct 07 08:14:26 crc kubenswrapper[4875]: I1007 08:14:26.847993 4875 generic.go:334] "Generic (PLEG): container finished" podID="df5f11ac-252a-4de8-9493-261081690646" containerID="5898bc02da32e377a8ac0ea1bdd26f7653e9ef37c05899016ba2fef10d88c798" exitCode=0 Oct 07 08:14:26 crc kubenswrapper[4875]: I1007 08:14:26.848032 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerDied","Data":"5898bc02da32e377a8ac0ea1bdd26f7653e9ef37c05899016ba2fef10d88c798"} Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.061982 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187352 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187455 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187535 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187576 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187637 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187681 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cm7p\" (UniqueName: \"kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.187931 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom\") pod \"df5f11ac-252a-4de8-9493-261081690646\" (UID: \"df5f11ac-252a-4de8-9493-261081690646\") " Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.188306 4875 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df5f11ac-252a-4de8-9493-261081690646-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.192477 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts" (OuterVolumeSpecName: "scripts") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.193180 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.194122 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p" (OuterVolumeSpecName: "kube-api-access-6cm7p") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "kube-api-access-6cm7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.256198 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.290477 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cm7p\" (UniqueName: \"kubernetes.io/projected/df5f11ac-252a-4de8-9493-261081690646-kube-api-access-6cm7p\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.290511 4875 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.290521 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.290530 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.315231 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data" (OuterVolumeSpecName: "config-data") pod "df5f11ac-252a-4de8-9493-261081690646" (UID: "df5f11ac-252a-4de8-9493-261081690646"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.392060 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5f11ac-252a-4de8-9493-261081690646-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.707469 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" path="/var/lib/kubelet/pods/71b8b921-955b-48c7-ad48-d07a99b590f1/volumes" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.860589 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df5f11ac-252a-4de8-9493-261081690646","Type":"ContainerDied","Data":"103bdc5bcec2d24e42bc3cd01210b37f931c0b161f105d4f3f6af2bfc15dba6a"} Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.860604 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.860997 4875 scope.go:117] "RemoveContainer" containerID="d7dba3dafa292528bbf375d40c9ee3c4080bd19a6a9e91c346ff3278dc62eebb" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.864860 4875 generic.go:334] "Generic (PLEG): container finished" podID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerID="84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1" exitCode=0 Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.865081 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerDied","Data":"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1"} Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.898576 4875 scope.go:117] "RemoveContainer" containerID="5898bc02da32e377a8ac0ea1bdd26f7653e9ef37c05899016ba2fef10d88c798" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.907849 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.918666 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.928889 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929423 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929441 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api" Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929466 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api-log" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929475 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api-log" Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929495 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="dnsmasq-dns" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929503 4875 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="dnsmasq-dns" Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929515 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="init" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929523 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="init" Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929543 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="cinder-scheduler" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929553 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="cinder-scheduler" Oct 07 08:14:27 crc kubenswrapper[4875]: E1007 08:14:27.929568 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="probe" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.929576 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="probe" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.930021 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.930044 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="cinder-scheduler" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.930062 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d86ba6fc-0914-42b4-a080-94a2173841bf" containerName="barbican-api-log" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.930073 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="dnsmasq-dns" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.930084 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="df5f11ac-252a-4de8-9493-261081690646" containerName="probe" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.932136 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.938564 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 07 08:14:27 crc kubenswrapper[4875]: I1007 08:14:27.960564 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008417 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008491 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szr6q\" (UniqueName: \"kubernetes.io/projected/63b2e079-16c2-4f61-8ea4-a0fd50150f03-kube-api-access-szr6q\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008521 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-scripts\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008566 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008583 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.008703 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63b2e079-16c2-4f61-8ea4-a0fd50150f03-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111355 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63b2e079-16c2-4f61-8ea4-a0fd50150f03-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111441 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111465 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-szr6q\" (UniqueName: \"kubernetes.io/projected/63b2e079-16c2-4f61-8ea4-a0fd50150f03-kube-api-access-szr6q\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111504 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-scripts\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111587 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.111609 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.112382 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63b2e079-16c2-4f61-8ea4-a0fd50150f03-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.116227 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.116735 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-scripts\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.137623 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.138149 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b2e079-16c2-4f61-8ea4-a0fd50150f03-config-data\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.140617 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szr6q\" (UniqueName: \"kubernetes.io/projected/63b2e079-16c2-4f61-8ea4-a0fd50150f03-kube-api-access-szr6q\") pod \"cinder-scheduler-0\" (UID: \"63b2e079-16c2-4f61-8ea4-a0fd50150f03\") " pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 
crc kubenswrapper[4875]: I1007 08:14:28.253700 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.587584 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.782533 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 08:14:28 crc kubenswrapper[4875]: W1007 08:14:28.810987 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63b2e079_16c2_4f61_8ea4_a0fd50150f03.slice/crio-3557abe11b0a2ba41e626a80fa077ff027f239bafec4a1df0e32f93cb7ba5883 WatchSource:0}: Error finding container 3557abe11b0a2ba41e626a80fa077ff027f239bafec4a1df0e32f93cb7ba5883: Status 404 returned error can't find the container with id 3557abe11b0a2ba41e626a80fa077ff027f239bafec4a1df0e32f93cb7ba5883 Oct 07 08:14:28 crc kubenswrapper[4875]: I1007 08:14:28.888745 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63b2e079-16c2-4f61-8ea4-a0fd50150f03","Type":"ContainerStarted","Data":"3557abe11b0a2ba41e626a80fa077ff027f239bafec4a1df0e32f93cb7ba5883"} Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.062021 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-754566d8d4-jzmbw" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.692498 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.695168 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.700353 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.701200 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-t8q65" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.702125 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.702415 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.755450 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.755529 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.755566 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzlnk\" (UniqueName: \"kubernetes.io/projected/c25e751b-f41b-4571-92e4-81d1b263ed48-kube-api-access-lzlnk\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.755606 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config-secret\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.758524 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df5f11ac-252a-4de8-9493-261081690646" path="/var/lib/kubelet/pods/df5f11ac-252a-4de8-9493-261081690646/volumes" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.860511 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.860626 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.860688 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzlnk\" (UniqueName: \"kubernetes.io/projected/c25e751b-f41b-4571-92e4-81d1b263ed48-kube-api-access-lzlnk\") 
pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.860728 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config-secret\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.862446 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.873187 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-openstack-config-secret\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.883568 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c25e751b-f41b-4571-92e4-81d1b263ed48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.885187 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzlnk\" (UniqueName: \"kubernetes.io/projected/c25e751b-f41b-4571-92e4-81d1b263ed48-kube-api-access-lzlnk\") pod \"openstackclient\" (UID: \"c25e751b-f41b-4571-92e4-81d1b263ed48\") " pod="openstack/openstackclient" Oct 07 08:14:29 crc kubenswrapper[4875]: I1007 08:14:29.938367 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63b2e079-16c2-4f61-8ea4-a0fd50150f03","Type":"ContainerStarted","Data":"e5d765c89e494f8d7c236c69de39267bd7a28731ac65041c31b6434ab2944394"} Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.048579 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.361051 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7c67bffd47-7tr7b" podUID="71b8b921-955b-48c7-ad48-d07a99b590f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.157:5353: i/o timeout" Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.646738 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.951122 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63b2e079-16c2-4f61-8ea4-a0fd50150f03","Type":"ContainerStarted","Data":"285662453c0949213ca043b202d1b5a9d0b1a43416154f083184be006ff19d20"} Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.952231 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c25e751b-f41b-4571-92e4-81d1b263ed48","Type":"ContainerStarted","Data":"aac43c2a3bfa2189676fd7d160a3a9dabad796b690e9e3fb75fdc8ac15c66247"} Oct 07 08:14:30 crc kubenswrapper[4875]: I1007 08:14:30.977624 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.977608599 podStartE2EDuration="3.977608599s" podCreationTimestamp="2025-10-07 08:14:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:30.970552268 +0000 UTC m=+1095.930322821" watchObservedRunningTime="2025-10-07 08:14:30.977608599 +0000 UTC m=+1095.937379142" Oct 07 08:14:31 crc kubenswrapper[4875]: I1007 08:14:31.220795 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:14:31 crc kubenswrapper[4875]: I1007 08:14:31.220894 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:14:32 crc kubenswrapper[4875]: I1007 08:14:32.463708 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.254017 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.665401 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-88cf5ccd5-wf5cp"] Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.671247 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.676041 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.676195 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.676650 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.687220 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-88cf5ccd5-wf5cp"] Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754169 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-etc-swift\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754241 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-run-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754262 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-internal-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754313 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-combined-ca-bundle\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754361 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t229\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-kube-api-access-7t229\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754392 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-log-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754412 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-config-data\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " 
pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.754486 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-public-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.856668 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-run-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.856744 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-internal-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.856807 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-combined-ca-bundle\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.856868 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t229\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-kube-api-access-7t229\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.856984 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-log-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.857025 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-config-data\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.857167 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-public-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.857279 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-etc-swift\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc 
kubenswrapper[4875]: I1007 08:14:33.858255 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-run-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.859718 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74566d25-db70-4528-b9c4-89b32863c2eb-log-httpd\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.866686 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-internal-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.869958 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-combined-ca-bundle\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.873761 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-public-tls-certs\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.874795 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-etc-swift\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.875525 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74566d25-db70-4528-b9c4-89b32863c2eb-config-data\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.884645 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t229\" (UniqueName: \"kubernetes.io/projected/74566d25-db70-4528-b9c4-89b32863c2eb-kube-api-access-7t229\") pod \"swift-proxy-88cf5ccd5-wf5cp\" (UID: \"74566d25-db70-4528-b9c4-89b32863c2eb\") " pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:33 crc kubenswrapper[4875]: I1007 08:14:33.995573 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.271451 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.272370 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="proxy-httpd" containerID="cri-o://1bb12e888b0e23138aba16b6f95f527c73a5981077075641de7e2ca210fe1511" gracePeriod=30 Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.272719 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="sg-core" containerID="cri-o://fe45f768b02a79b62860547256922c9c8528953ecddd76b1b453c15e676bd542" gracePeriod=30 Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.272790 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-notification-agent" containerID="cri-o://f78036b224835188f862fe10d2995d3a4ffe5f69491a003ff044dbf55ee172c8" gracePeriod=30 Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.272851 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-central-agent" containerID="cri-o://1f85e3fefaf6f6ea41101cd9a4723ea34065be2da1dca2044c9c9d14eef64dff" gracePeriod=30 Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.278526 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 08:14:34 crc kubenswrapper[4875]: I1007 08:14:34.576898 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-88cf5ccd5-wf5cp"] Oct 07 08:14:34 crc kubenswrapper[4875]: W1007 08:14:34.584325 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74566d25_db70_4528_b9c4_89b32863c2eb.slice/crio-838a7d3d2aac4017b2e85a683e03ee8b3b681877e5db0d8499ec767ac0b783ba WatchSource:0}: Error finding container 838a7d3d2aac4017b2e85a683e03ee8b3b681877e5db0d8499ec767ac0b783ba: Status 404 returned error can't find the container with id 838a7d3d2aac4017b2e85a683e03ee8b3b681877e5db0d8499ec767ac0b783ba Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.007103 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" event={"ID":"74566d25-db70-4528-b9c4-89b32863c2eb","Type":"ContainerStarted","Data":"ce468479b2e5688569374376e4f14ebdbce65a79bf45c78ac18e6226342a8b76"} Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.008051 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" event={"ID":"74566d25-db70-4528-b9c4-89b32863c2eb","Type":"ContainerStarted","Data":"838a7d3d2aac4017b2e85a683e03ee8b3b681877e5db0d8499ec767ac0b783ba"} Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.013920 4875 generic.go:334] "Generic (PLEG): container finished" podID="9c287068-593f-426c-b370-94917005d8e9" containerID="1bb12e888b0e23138aba16b6f95f527c73a5981077075641de7e2ca210fe1511" exitCode=0 Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.013951 4875 generic.go:334] "Generic (PLEG): container finished" podID="9c287068-593f-426c-b370-94917005d8e9" 
containerID="fe45f768b02a79b62860547256922c9c8528953ecddd76b1b453c15e676bd542" exitCode=2 Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.013959 4875 generic.go:334] "Generic (PLEG): container finished" podID="9c287068-593f-426c-b370-94917005d8e9" containerID="1f85e3fefaf6f6ea41101cd9a4723ea34065be2da1dca2044c9c9d14eef64dff" exitCode=0 Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.013977 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerDied","Data":"1bb12e888b0e23138aba16b6f95f527c73a5981077075641de7e2ca210fe1511"} Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.013996 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerDied","Data":"fe45f768b02a79b62860547256922c9c8528953ecddd76b1b453c15e676bd542"} Oct 07 08:14:35 crc kubenswrapper[4875]: I1007 08:14:35.014006 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerDied","Data":"1f85e3fefaf6f6ea41101cd9a4723ea34065be2da1dca2044c9c9d14eef64dff"} Oct 07 08:14:36 crc kubenswrapper[4875]: I1007 08:14:36.025143 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" event={"ID":"74566d25-db70-4528-b9c4-89b32863c2eb","Type":"ContainerStarted","Data":"ba9216fe96e5d1a60a14a3c8f695514e45122d68b3a909172749b34f56ca4bc6"} Oct 07 08:14:36 crc kubenswrapper[4875]: I1007 08:14:36.025899 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:36 crc kubenswrapper[4875]: I1007 08:14:36.025919 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:36 crc kubenswrapper[4875]: I1007 08:14:36.061153 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" podStartSLOduration=3.061131469 podStartE2EDuration="3.061131469s" podCreationTimestamp="2025-10-07 08:14:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:36.045132275 +0000 UTC m=+1101.004902818" watchObservedRunningTime="2025-10-07 08:14:36.061131469 +0000 UTC m=+1101.020902012" Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.885512 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-spt5r"] Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.887207 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.896954 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-spt5r"] Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.948746 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b5ws\" (UniqueName: \"kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws\") pod \"nova-api-db-create-spt5r\" (UID: \"39a4c6c6-dcf7-4d49-9a79-44258c5db723\") " pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.978675 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-x7f47"] Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.980000 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:37 crc kubenswrapper[4875]: I1007 08:14:37.992906 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-x7f47"] Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.049221 4875 generic.go:334] "Generic (PLEG): container finished" podID="9c287068-593f-426c-b370-94917005d8e9" containerID="f78036b224835188f862fe10d2995d3a4ffe5f69491a003ff044dbf55ee172c8" exitCode=0 Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.049288 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerDied","Data":"f78036b224835188f862fe10d2995d3a4ffe5f69491a003ff044dbf55ee172c8"} Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.050535 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b5ws\" (UniqueName: \"kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws\") pod \"nova-api-db-create-spt5r\" (UID: \"39a4c6c6-dcf7-4d49-9a79-44258c5db723\") " pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.050658 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzncw\" (UniqueName: \"kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw\") pod \"nova-cell0-db-create-x7f47\" (UID: \"ceb89a19-ace2-4804-8d6d-236266c6d7d9\") " pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.080341 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b5ws\" (UniqueName: \"kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws\") pod \"nova-api-db-create-spt5r\" (UID: \"39a4c6c6-dcf7-4d49-9a79-44258c5db723\") " pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.154040 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzncw\" (UniqueName: \"kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw\") pod \"nova-cell0-db-create-x7f47\" (UID: \"ceb89a19-ace2-4804-8d6d-236266c6d7d9\") " pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.177165 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzncw\" (UniqueName: 
\"kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw\") pod \"nova-cell0-db-create-x7f47\" (UID: \"ceb89a19-ace2-4804-8d6d-236266c6d7d9\") " pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.193516 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-xkx25"] Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.195009 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.207603 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xkx25"] Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.219139 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.255418 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48ls5\" (UniqueName: \"kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5\") pod \"nova-cell1-db-create-xkx25\" (UID: \"5e168f8b-b075-472f-87c8-84a101d9ffda\") " pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.313164 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.357388 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48ls5\" (UniqueName: \"kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5\") pod \"nova-cell1-db-create-xkx25\" (UID: \"5e168f8b-b075-472f-87c8-84a101d9ffda\") " pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.392793 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48ls5\" (UniqueName: \"kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5\") pod \"nova-cell1-db-create-xkx25\" (UID: \"5e168f8b-b075-472f-87c8-84a101d9ffda\") " pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.533114 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.545611 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:38 crc kubenswrapper[4875]: I1007 08:14:38.588277 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.915614 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959229 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959313 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959415 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959437 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959553 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959633 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pggvw\" (UniqueName: \"kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.959687 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts\") pod \"9c287068-593f-426c-b370-94917005d8e9\" (UID: \"9c287068-593f-426c-b370-94917005d8e9\") " Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.961846 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.962992 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.968809 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts" (OuterVolumeSpecName: "scripts") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:41 crc kubenswrapper[4875]: I1007 08:14:41.975076 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw" (OuterVolumeSpecName: "kube-api-access-pggvw") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "kube-api-access-pggvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.022981 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.062428 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.062469 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.062482 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.062494 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c287068-593f-426c-b370-94917005d8e9-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.062507 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pggvw\" (UniqueName: \"kubernetes.io/projected/9c287068-593f-426c-b370-94917005d8e9-kube-api-access-pggvw\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.066637 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.109578 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c25e751b-f41b-4571-92e4-81d1b263ed48","Type":"ContainerStarted","Data":"4fd5db9d618121b546ffa1674d7d9ac9e799856fb5d8f420e6d50a9a7decd269"} Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.126431 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c287068-593f-426c-b370-94917005d8e9","Type":"ContainerDied","Data":"36b49f11468d343a739eaca8aa7c9dd5e134e49467862e07684cd659cedcc05e"} Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.126483 4875 scope.go:117] "RemoveContainer" containerID="1bb12e888b0e23138aba16b6f95f527c73a5981077075641de7e2ca210fe1511" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.126652 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.133012 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data" (OuterVolumeSpecName: "config-data") pod "9c287068-593f-426c-b370-94917005d8e9" (UID: "9c287068-593f-426c-b370-94917005d8e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.142044 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-spt5r"] Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.154383 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.192938274 podStartE2EDuration="13.154363299s" podCreationTimestamp="2025-10-07 08:14:29 +0000 UTC" firstStartedPulling="2025-10-07 08:14:30.65072686 +0000 UTC m=+1095.610497403" lastFinishedPulling="2025-10-07 08:14:41.612151885 +0000 UTC m=+1106.571922428" observedRunningTime="2025-10-07 08:14:42.129801385 +0000 UTC m=+1107.089571918" watchObservedRunningTime="2025-10-07 08:14:42.154363299 +0000 UTC m=+1107.114133842" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.161362 4875 scope.go:117] "RemoveContainer" containerID="fe45f768b02a79b62860547256922c9c8528953ecddd76b1b453c15e676bd542" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.166305 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.166338 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c287068-593f-426c-b370-94917005d8e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.221314 4875 scope.go:117] "RemoveContainer" containerID="f78036b224835188f862fe10d2995d3a4ffe5f69491a003ff044dbf55ee172c8" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.255133 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xkx25"] Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.264793 4875 scope.go:117] "RemoveContainer" containerID="1f85e3fefaf6f6ea41101cd9a4723ea34065be2da1dca2044c9c9d14eef64dff" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.348469 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell0-db-create-x7f47"] Oct 07 08:14:42 crc kubenswrapper[4875]: W1007 08:14:42.351637 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podceb89a19_ace2_4804_8d6d_236266c6d7d9.slice/crio-7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646 WatchSource:0}: Error finding container 7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646: Status 404 returned error can't find the container with id 7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646 Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.479327 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.489679 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.544854 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:42 crc kubenswrapper[4875]: E1007 08:14:42.546627 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-central-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.546725 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-central-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: E1007 08:14:42.546850 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="proxy-httpd" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.546960 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="proxy-httpd" Oct 07 08:14:42 crc kubenswrapper[4875]: E1007 08:14:42.547833 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-notification-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.548047 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-notification-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: E1007 08:14:42.548127 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="sg-core" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.548261 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="sg-core" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.548836 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="sg-core" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.548988 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-central-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.549092 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="proxy-httpd" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.549176 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c287068-593f-426c-b370-94917005d8e9" containerName="ceilometer-notification-agent" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.553645 4875 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.555115 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.557442 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.558248 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.700083 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.700240 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.700331 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.700496 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.700802 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.701014 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.701276 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9jbj\" (UniqueName: \"kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803057 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: 
I1007 08:14:42.803135 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803176 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9jbj\" (UniqueName: \"kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803210 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803229 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803256 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.803315 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.804435 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.804501 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.808148 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.808938 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.817270 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.820228 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.826576 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9jbj\" (UniqueName: \"kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj\") pod \"ceilometer-0\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " pod="openstack/ceilometer-0" Oct 07 08:14:42 crc kubenswrapper[4875]: I1007 08:14:42.918461 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.140316 4875 generic.go:334] "Generic (PLEG): container finished" podID="5e168f8b-b075-472f-87c8-84a101d9ffda" containerID="1e570e6a35fda4ee948de23ad26f6b10fc9e46be47a6cf82678622f6abcc709d" exitCode=0 Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.140397 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xkx25" event={"ID":"5e168f8b-b075-472f-87c8-84a101d9ffda","Type":"ContainerDied","Data":"1e570e6a35fda4ee948de23ad26f6b10fc9e46be47a6cf82678622f6abcc709d"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.140439 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xkx25" event={"ID":"5e168f8b-b075-472f-87c8-84a101d9ffda","Type":"ContainerStarted","Data":"30adb759904c92d7d04b1bde7fe784e5bb8020645b31a88d2c0fa7f076f5614b"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.147966 4875 generic.go:334] "Generic (PLEG): container finished" podID="ceb89a19-ace2-4804-8d6d-236266c6d7d9" containerID="4368636d8afeaf9948fee4dad41b7134d140462d62c2e5b6cf6300355e078e1e" exitCode=0 Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.148036 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x7f47" event={"ID":"ceb89a19-ace2-4804-8d6d-236266c6d7d9","Type":"ContainerDied","Data":"4368636d8afeaf9948fee4dad41b7134d140462d62c2e5b6cf6300355e078e1e"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.148064 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x7f47" event={"ID":"ceb89a19-ace2-4804-8d6d-236266c6d7d9","Type":"ContainerStarted","Data":"7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.164721 4875 generic.go:334] "Generic (PLEG): container finished" podID="39a4c6c6-dcf7-4d49-9a79-44258c5db723" containerID="b32bfeee11314ba06dea3eb1c48ed2b56b1524d234d3fdba24eb9be7de4ea1a9" exitCode=0 Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.164796 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-spt5r" event={"ID":"39a4c6c6-dcf7-4d49-9a79-44258c5db723","Type":"ContainerDied","Data":"b32bfeee11314ba06dea3eb1c48ed2b56b1524d234d3fdba24eb9be7de4ea1a9"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.164822 4875 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-api-db-create-spt5r" event={"ID":"39a4c6c6-dcf7-4d49-9a79-44258c5db723","Type":"ContainerStarted","Data":"d788435c7c2495618bdaccda2ce77756525bf0a96358b0d348962d3b75621974"} Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.431493 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:43 crc kubenswrapper[4875]: W1007 08:14:43.441057 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadd9c499_b0a8_4e7c_b837_3e9baa672ccb.slice/crio-ec9b70f2c312d2a6eb6aa24c36bfbce6823afe3b043defa6a101ff240e4bc882 WatchSource:0}: Error finding container ec9b70f2c312d2a6eb6aa24c36bfbce6823afe3b043defa6a101ff240e4bc882: Status 404 returned error can't find the container with id ec9b70f2c312d2a6eb6aa24c36bfbce6823afe3b043defa6a101ff240e4bc882 Oct 07 08:14:43 crc kubenswrapper[4875]: I1007 08:14:43.719775 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c287068-593f-426c-b370-94917005d8e9" path="/var/lib/kubelet/pods/9c287068-593f-426c-b370-94917005d8e9/volumes" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.003115 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.005475 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-88cf5ccd5-wf5cp" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.180681 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerStarted","Data":"ec9b70f2c312d2a6eb6aa24c36bfbce6823afe3b043defa6a101ff240e4bc882"} Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.634392 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.688218 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.725602 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.755649 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzncw\" (UniqueName: \"kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw\") pod \"ceb89a19-ace2-4804-8d6d-236266c6d7d9\" (UID: \"ceb89a19-ace2-4804-8d6d-236266c6d7d9\") " Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.755738 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b5ws\" (UniqueName: \"kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws\") pod \"39a4c6c6-dcf7-4d49-9a79-44258c5db723\" (UID: \"39a4c6c6-dcf7-4d49-9a79-44258c5db723\") " Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.775628 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw" (OuterVolumeSpecName: "kube-api-access-pzncw") pod "ceb89a19-ace2-4804-8d6d-236266c6d7d9" (UID: "ceb89a19-ace2-4804-8d6d-236266c6d7d9"). InnerVolumeSpecName "kube-api-access-pzncw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.785483 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws" (OuterVolumeSpecName: "kube-api-access-5b5ws") pod "39a4c6c6-dcf7-4d49-9a79-44258c5db723" (UID: "39a4c6c6-dcf7-4d49-9a79-44258c5db723"). InnerVolumeSpecName "kube-api-access-5b5ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.857978 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48ls5\" (UniqueName: \"kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5\") pod \"5e168f8b-b075-472f-87c8-84a101d9ffda\" (UID: \"5e168f8b-b075-472f-87c8-84a101d9ffda\") " Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.858537 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzncw\" (UniqueName: \"kubernetes.io/projected/ceb89a19-ace2-4804-8d6d-236266c6d7d9-kube-api-access-pzncw\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.858561 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b5ws\" (UniqueName: \"kubernetes.io/projected/39a4c6c6-dcf7-4d49-9a79-44258c5db723-kube-api-access-5b5ws\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.863373 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5" (OuterVolumeSpecName: "kube-api-access-48ls5") pod "5e168f8b-b075-472f-87c8-84a101d9ffda" (UID: "5e168f8b-b075-472f-87c8-84a101d9ffda"). InnerVolumeSpecName "kube-api-access-48ls5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.960900 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48ls5\" (UniqueName: \"kubernetes.io/projected/5e168f8b-b075-472f-87c8-84a101d9ffda-kube-api-access-48ls5\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:44 crc kubenswrapper[4875]: I1007 08:14:44.994792 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.207755 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xkx25" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.207908 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xkx25" event={"ID":"5e168f8b-b075-472f-87c8-84a101d9ffda","Type":"ContainerDied","Data":"30adb759904c92d7d04b1bde7fe784e5bb8020645b31a88d2c0fa7f076f5614b"} Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.207997 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30adb759904c92d7d04b1bde7fe784e5bb8020645b31a88d2c0fa7f076f5614b" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.211411 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x7f47" event={"ID":"ceb89a19-ace2-4804-8d6d-236266c6d7d9","Type":"ContainerDied","Data":"7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646"} Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.211441 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f73bbd2b1071e2805befcf4de60dcf92dabedb7611ec124b0553647ea9b9646" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.211665 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x7f47" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.213264 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-spt5r" event={"ID":"39a4c6c6-dcf7-4d49-9a79-44258c5db723","Type":"ContainerDied","Data":"d788435c7c2495618bdaccda2ce77756525bf0a96358b0d348962d3b75621974"} Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.213306 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d788435c7c2495618bdaccda2ce77756525bf0a96358b0d348962d3b75621974" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.213394 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-spt5r" Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.217764 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerStarted","Data":"09d92fb1ba711740b088f255ebea24d170ae31a346a4ae2e2bc1bd4e80ec1444"} Oct 07 08:14:45 crc kubenswrapper[4875]: I1007 08:14:45.626237 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:46 crc kubenswrapper[4875]: I1007 08:14:46.229092 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerStarted","Data":"36e18cc3064112b06a12c98ff9bf353b4b6293d4a0486eab07660f7b09143380"} Oct 07 08:14:46 crc kubenswrapper[4875]: I1007 08:14:46.229563 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerStarted","Data":"5a745f67f59a00ac9fd4bfaa4982bf56aec894dc04d06eaf38cc297a670161dd"} Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.091367 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-892a-account-create-5s8cf"] Oct 07 08:14:48 crc kubenswrapper[4875]: E1007 08:14:48.094943 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb89a19-ace2-4804-8d6d-236266c6d7d9" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095028 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb89a19-ace2-4804-8d6d-236266c6d7d9" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: E1007 08:14:48.095127 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39a4c6c6-dcf7-4d49-9a79-44258c5db723" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095204 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="39a4c6c6-dcf7-4d49-9a79-44258c5db723" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: E1007 08:14:48.095281 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e168f8b-b075-472f-87c8-84a101d9ffda" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095338 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e168f8b-b075-472f-87c8-84a101d9ffda" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095570 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceb89a19-ace2-4804-8d6d-236266c6d7d9" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095668 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="39a4c6c6-dcf7-4d49-9a79-44258c5db723" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.095739 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e168f8b-b075-472f-87c8-84a101d9ffda" containerName="mariadb-database-create" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.096614 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.100306 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.110794 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-892a-account-create-5s8cf"] Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.235499 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbtt7\" (UniqueName: \"kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7\") pod \"nova-api-892a-account-create-5s8cf\" (UID: \"c95bfab8-a736-486e-ab63-99652c643651\") " pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258385 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerStarted","Data":"cb31edc509407ed46d7cbbd2c7238222c54291de31de7b18153431afb9fae7bf"} Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258542 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258514 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-central-agent" containerID="cri-o://09d92fb1ba711740b088f255ebea24d170ae31a346a4ae2e2bc1bd4e80ec1444" gracePeriod=30 Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258628 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-notification-agent" containerID="cri-o://5a745f67f59a00ac9fd4bfaa4982bf56aec894dc04d06eaf38cc297a670161dd" gracePeriod=30 Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258597 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="proxy-httpd" containerID="cri-o://cb31edc509407ed46d7cbbd2c7238222c54291de31de7b18153431afb9fae7bf" gracePeriod=30 Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.258682 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="sg-core" containerID="cri-o://36e18cc3064112b06a12c98ff9bf353b4b6293d4a0486eab07660f7b09143380" gracePeriod=30 Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.312440 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b31b-account-create-pgmkn"] Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.314261 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.317952 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.319645 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5991538480000003 podStartE2EDuration="6.31962139s" podCreationTimestamp="2025-10-07 08:14:42 +0000 UTC" firstStartedPulling="2025-10-07 08:14:43.443901988 +0000 UTC m=+1108.403672531" lastFinishedPulling="2025-10-07 08:14:47.16436953 +0000 UTC m=+1112.124140073" observedRunningTime="2025-10-07 08:14:48.289335437 +0000 UTC m=+1113.249106000" watchObservedRunningTime="2025-10-07 08:14:48.31962139 +0000 UTC m=+1113.279391933" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.331248 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b31b-account-create-pgmkn"] Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.340442 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbtt7\" (UniqueName: \"kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7\") pod \"nova-api-892a-account-create-5s8cf\" (UID: \"c95bfab8-a736-486e-ab63-99652c643651\") " pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.363551 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbtt7\" (UniqueName: \"kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7\") pod \"nova-api-892a-account-create-5s8cf\" (UID: \"c95bfab8-a736-486e-ab63-99652c643651\") " pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.418532 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.445685 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfhxr\" (UniqueName: \"kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr\") pod \"nova-cell0-b31b-account-create-pgmkn\" (UID: \"733d2913-becf-4e28-b1a8-c8c510bd1fba\") " pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.547046 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfhxr\" (UniqueName: \"kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr\") pod \"nova-cell0-b31b-account-create-pgmkn\" (UID: \"733d2913-becf-4e28-b1a8-c8c510bd1fba\") " pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.566858 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfhxr\" (UniqueName: \"kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr\") pod \"nova-cell0-b31b-account-create-pgmkn\" (UID: \"733d2913-becf-4e28-b1a8-c8c510bd1fba\") " pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.589773 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8486dbbd8b-5n679" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.589900 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.655922 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:48 crc kubenswrapper[4875]: I1007 08:14:48.927266 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-892a-account-create-5s8cf"] Oct 07 08:14:48 crc kubenswrapper[4875]: W1007 08:14:48.935639 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc95bfab8_a736_486e_ab63_99652c643651.slice/crio-2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662 WatchSource:0}: Error finding container 2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662: Status 404 returned error can't find the container with id 2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.012578 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5f5f877689-scxcf" Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.102765 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.103528 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7877fc45f6-htlwc" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-api" containerID="cri-o://ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e" gracePeriod=30 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.103574 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7877fc45f6-htlwc" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-httpd" containerID="cri-o://501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee" gracePeriod=30 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.233271 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b31b-account-create-pgmkn"] Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.304531 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b31b-account-create-pgmkn" event={"ID":"733d2913-becf-4e28-b1a8-c8c510bd1fba","Type":"ContainerStarted","Data":"106ac6d5c82266f6f4d31734b69c999fdd1507675ac38289e1d2677348edaa6c"} Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.341722 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-892a-account-create-5s8cf" event={"ID":"c95bfab8-a736-486e-ab63-99652c643651","Type":"ContainerStarted","Data":"7e860d2be46efac7c7d8ff938355c6c487a90214ba868fa27403da0efd4b8d39"} Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.341771 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-892a-account-create-5s8cf" event={"ID":"c95bfab8-a736-486e-ab63-99652c643651","Type":"ContainerStarted","Data":"2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662"} Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.383579 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-892a-account-create-5s8cf" podStartSLOduration=1.383563508 podStartE2EDuration="1.383563508s" podCreationTimestamp="2025-10-07 08:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:14:49.383157594 +0000 UTC m=+1114.342928137" watchObservedRunningTime="2025-10-07 08:14:49.383563508 +0000 UTC m=+1114.343334051" 
Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385400 4875 generic.go:334] "Generic (PLEG): container finished" podID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerID="cb31edc509407ed46d7cbbd2c7238222c54291de31de7b18153431afb9fae7bf" exitCode=0 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385431 4875 generic.go:334] "Generic (PLEG): container finished" podID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerID="36e18cc3064112b06a12c98ff9bf353b4b6293d4a0486eab07660f7b09143380" exitCode=2 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385439 4875 generic.go:334] "Generic (PLEG): container finished" podID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerID="5a745f67f59a00ac9fd4bfaa4982bf56aec894dc04d06eaf38cc297a670161dd" exitCode=0 Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385462 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerDied","Data":"cb31edc509407ed46d7cbbd2c7238222c54291de31de7b18153431afb9fae7bf"} Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385486 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerDied","Data":"36e18cc3064112b06a12c98ff9bf353b4b6293d4a0486eab07660f7b09143380"} Oct 07 08:14:49 crc kubenswrapper[4875]: I1007 08:14:49.385496 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerDied","Data":"5a745f67f59a00ac9fd4bfaa4982bf56aec894dc04d06eaf38cc297a670161dd"} Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.400632 4875 generic.go:334] "Generic (PLEG): container finished" podID="733d2913-becf-4e28-b1a8-c8c510bd1fba" containerID="823297c8a6a8bac1329d0a5274c85040fdbba71e878f34ec06d16c67bfb83770" exitCode=0 Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.401049 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b31b-account-create-pgmkn" event={"ID":"733d2913-becf-4e28-b1a8-c8c510bd1fba","Type":"ContainerDied","Data":"823297c8a6a8bac1329d0a5274c85040fdbba71e878f34ec06d16c67bfb83770"} Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.404924 4875 generic.go:334] "Generic (PLEG): container finished" podID="2129b276-6537-40de-b872-ac05e2ab8545" containerID="501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee" exitCode=0 Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.405023 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerDied","Data":"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee"} Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.410846 4875 generic.go:334] "Generic (PLEG): container finished" podID="c95bfab8-a736-486e-ab63-99652c643651" containerID="7e860d2be46efac7c7d8ff938355c6c487a90214ba868fa27403da0efd4b8d39" exitCode=0 Oct 07 08:14:50 crc kubenswrapper[4875]: I1007 08:14:50.410995 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-892a-account-create-5s8cf" event={"ID":"c95bfab8-a736-486e-ab63-99652c643651","Type":"ContainerDied","Data":"7e860d2be46efac7c7d8ff938355c6c487a90214ba868fa27403da0efd4b8d39"} Oct 07 08:14:51 crc kubenswrapper[4875]: I1007 08:14:51.899838 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:51 crc kubenswrapper[4875]: I1007 08:14:51.917049 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.048912 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfhxr\" (UniqueName: \"kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr\") pod \"733d2913-becf-4e28-b1a8-c8c510bd1fba\" (UID: \"733d2913-becf-4e28-b1a8-c8c510bd1fba\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.049492 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbtt7\" (UniqueName: \"kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7\") pod \"c95bfab8-a736-486e-ab63-99652c643651\" (UID: \"c95bfab8-a736-486e-ab63-99652c643651\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.057868 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr" (OuterVolumeSpecName: "kube-api-access-xfhxr") pod "733d2913-becf-4e28-b1a8-c8c510bd1fba" (UID: "733d2913-becf-4e28-b1a8-c8c510bd1fba"). InnerVolumeSpecName "kube-api-access-xfhxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.059260 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7" (OuterVolumeSpecName: "kube-api-access-jbtt7") pod "c95bfab8-a736-486e-ab63-99652c643651" (UID: "c95bfab8-a736-486e-ab63-99652c643651"). InnerVolumeSpecName "kube-api-access-jbtt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.152423 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfhxr\" (UniqueName: \"kubernetes.io/projected/733d2913-becf-4e28-b1a8-c8c510bd1fba-kube-api-access-xfhxr\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.152467 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbtt7\" (UniqueName: \"kubernetes.io/projected/c95bfab8-a736-486e-ab63-99652c643651-kube-api-access-jbtt7\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.439658 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-892a-account-create-5s8cf" event={"ID":"c95bfab8-a736-486e-ab63-99652c643651","Type":"ContainerDied","Data":"2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662"} Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.440292 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e9bf6ed2d68d1f9cb80863358771fdc5ca3fc750f12c99d12e0010b57645662" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.440377 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-892a-account-create-5s8cf" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.444552 4875 generic.go:334] "Generic (PLEG): container finished" podID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerID="09d92fb1ba711740b088f255ebea24d170ae31a346a4ae2e2bc1bd4e80ec1444" exitCode=0 Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.444615 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerDied","Data":"09d92fb1ba711740b088f255ebea24d170ae31a346a4ae2e2bc1bd4e80ec1444"} Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.446419 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b31b-account-create-pgmkn" event={"ID":"733d2913-becf-4e28-b1a8-c8c510bd1fba","Type":"ContainerDied","Data":"106ac6d5c82266f6f4d31734b69c999fdd1507675ac38289e1d2677348edaa6c"} Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.446444 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="106ac6d5c82266f6f4d31734b69c999fdd1507675ac38289e1d2677348edaa6c" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.446489 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b31b-account-create-pgmkn" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.494681 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.562446 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.562681 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9jbj\" (UniqueName: \"kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.562737 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.562817 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.562951 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.563006 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.563139 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.563789 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts\") pod \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\" (UID: \"add9c499-b0a8-4e7c-b837-3e9baa672ccb\") " Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.564325 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.564439 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.568781 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts" (OuterVolumeSpecName: "scripts") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.569502 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj" (OuterVolumeSpecName: "kube-api-access-z9jbj") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "kube-api-access-z9jbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.600663 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.663155 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.667528 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.667579 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.667594 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9jbj\" (UniqueName: \"kubernetes.io/projected/add9c499-b0a8-4e7c-b837-3e9baa672ccb-kube-api-access-z9jbj\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.667615 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/add9c499-b0a8-4e7c-b837-3e9baa672ccb-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.667628 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.692695 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data" (OuterVolumeSpecName: "config-data") pod "add9c499-b0a8-4e7c-b837-3e9baa672ccb" (UID: "add9c499-b0a8-4e7c-b837-3e9baa672ccb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:52 crc kubenswrapper[4875]: I1007 08:14:52.770172 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/add9c499-b0a8-4e7c-b837-3e9baa672ccb-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.473561 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"add9c499-b0a8-4e7c-b837-3e9baa672ccb","Type":"ContainerDied","Data":"ec9b70f2c312d2a6eb6aa24c36bfbce6823afe3b043defa6a101ff240e4bc882"} Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.474029 4875 scope.go:117] "RemoveContainer" containerID="cb31edc509407ed46d7cbbd2c7238222c54291de31de7b18153431afb9fae7bf" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.474140 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.539245 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.567029 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.577378 4875 scope.go:117] "RemoveContainer" containerID="36e18cc3064112b06a12c98ff9bf353b4b6293d4a0486eab07660f7b09143380" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.577624 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578213 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="733d2913-becf-4e28-b1a8-c8c510bd1fba" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578238 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="733d2913-becf-4e28-b1a8-c8c510bd1fba" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578274 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c95bfab8-a736-486e-ab63-99652c643651" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578283 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c95bfab8-a736-486e-ab63-99652c643651" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578296 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-notification-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578303 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-notification-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578327 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-central-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578335 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-central-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578346 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="sg-core" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578353 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="sg-core" Oct 07 08:14:53 crc kubenswrapper[4875]: E1007 08:14:53.578372 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="proxy-httpd" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578379 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="proxy-httpd" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578631 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-notification-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578650 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="proxy-httpd" Oct 07 08:14:53 crc 
kubenswrapper[4875]: I1007 08:14:53.578664 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c95bfab8-a736-486e-ab63-99652c643651" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578679 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="ceilometer-central-agent" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578691 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" containerName="sg-core" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.578702 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="733d2913-becf-4e28-b1a8-c8c510bd1fba" containerName="mariadb-account-create" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.590655 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.609272 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.614579 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.615417 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.631528 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8bg6r"] Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.633758 4875 scope.go:117] "RemoveContainer" containerID="5a745f67f59a00ac9fd4bfaa4982bf56aec894dc04d06eaf38cc297a670161dd" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.634860 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.637764 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mvvw7" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.638232 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.638785 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.667031 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8bg6r"] Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.672801 4875 scope.go:117] "RemoveContainer" containerID="09d92fb1ba711740b088f255ebea24d170ae31a346a4ae2e2bc1bd4e80ec1444" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690332 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690413 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690502 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690539 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690584 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690673 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58nwh\" (UniqueName: \"kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.690708 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" 
Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.731330 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="add9c499-b0a8-4e7c-b837-3e9baa672ccb" path="/var/lib/kubelet/pods/add9c499-b0a8-4e7c-b837-3e9baa672ccb/volumes" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.792955 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzl7d\" (UniqueName: \"kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793330 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58nwh\" (UniqueName: \"kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793441 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793476 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793504 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793539 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793645 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793718 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.793992 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.794058 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.794164 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.794897 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.795143 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.801982 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.802237 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.803979 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.810934 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.815308 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58nwh\" (UniqueName: \"kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh\") pod \"ceilometer-0\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.895678 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data\") pod 
\"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.895805 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.897022 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.897269 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzl7d\" (UniqueName: \"kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.903071 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.903983 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.904590 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.920659 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzl7d\" (UniqueName: \"kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d\") pod \"nova-cell0-conductor-db-sync-8bg6r\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.932159 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:14:53 crc kubenswrapper[4875]: I1007 08:14:53.966668 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.264434 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.409827 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410241 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410292 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j5r4\" (UniqueName: \"kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410359 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410406 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410510 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.410580 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key\") pod \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\" (UID: \"35b9d27a-06f2-4b00-917f-f078fdf1b1c2\") " Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.411296 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs" (OuterVolumeSpecName: "logs") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.420792 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.421426 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4" (OuterVolumeSpecName: "kube-api-access-2j5r4") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "kube-api-access-2j5r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.446370 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.450990 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts" (OuterVolumeSpecName: "scripts") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.464497 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data" (OuterVolumeSpecName: "config-data") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.477123 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "35b9d27a-06f2-4b00-917f-f078fdf1b1c2" (UID: "35b9d27a-06f2-4b00-917f-f078fdf1b1c2"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.501694 4875 generic.go:334] "Generic (PLEG): container finished" podID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerID="eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601" exitCode=137 Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.501784 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8486dbbd8b-5n679" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.501819 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerDied","Data":"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601"} Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.501930 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8486dbbd8b-5n679" event={"ID":"35b9d27a-06f2-4b00-917f-f078fdf1b1c2","Type":"ContainerDied","Data":"365ccf00ad28ca893aa6bcec5d42a4fc409283767b29a5a8a61a3c1fde717c41"} Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.502003 4875 scope.go:117] "RemoveContainer" containerID="84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513776 4875 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513810 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513823 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j5r4\" (UniqueName: \"kubernetes.io/projected/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-kube-api-access-2j5r4\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513835 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513849 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513858 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.513904 4875 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35b9d27a-06f2-4b00-917f-f078fdf1b1c2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.569395 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.591279 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.605778 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8486dbbd8b-5n679"] Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.643335 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8bg6r"] Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.784526 4875 scope.go:117] "RemoveContainer" containerID="eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601" Oct 07 
08:14:54 crc kubenswrapper[4875]: W1007 08:14:54.791552 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d0b6a9b_b0f6_4a35_8864_c1ee479584ad.slice/crio-a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc WatchSource:0}: Error finding container a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc: Status 404 returned error can't find the container with id a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc Oct 07 08:14:54 crc kubenswrapper[4875]: W1007 08:14:54.803081 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f6edd8b_6c78_4ab9_9cbc_49b66303467d.slice/crio-b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52 WatchSource:0}: Error finding container b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52: Status 404 returned error can't find the container with id b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52 Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.834544 4875 scope.go:117] "RemoveContainer" containerID="84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1" Oct 07 08:14:54 crc kubenswrapper[4875]: E1007 08:14:54.835990 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1\": container with ID starting with 84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1 not found: ID does not exist" containerID="84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.836094 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1"} err="failed to get container status \"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1\": rpc error: code = NotFound desc = could not find container \"84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1\": container with ID starting with 84ac59b9e48b4282c66bf5edf3989e3485afe16efb106940292657dba1fad9f1 not found: ID does not exist" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.836157 4875 scope.go:117] "RemoveContainer" containerID="eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601" Oct 07 08:14:54 crc kubenswrapper[4875]: E1007 08:14:54.837274 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601\": container with ID starting with eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601 not found: ID does not exist" containerID="eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601" Oct 07 08:14:54 crc kubenswrapper[4875]: I1007 08:14:54.837354 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601"} err="failed to get container status \"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601\": rpc error: code = NotFound desc = could not find container \"eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601\": container with ID starting with eea62b4426de789b955235a7b6786d9e1687399a33512d2eac8353e901f57601 not found: ID does not exist" Oct 
07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.075140 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.236204 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle\") pod \"2129b276-6537-40de-b872-ac05e2ab8545\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.236377 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config\") pod \"2129b276-6537-40de-b872-ac05e2ab8545\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.236572 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs\") pod \"2129b276-6537-40de-b872-ac05e2ab8545\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.236606 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jdc8\" (UniqueName: \"kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8\") pod \"2129b276-6537-40de-b872-ac05e2ab8545\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.236644 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config\") pod \"2129b276-6537-40de-b872-ac05e2ab8545\" (UID: \"2129b276-6537-40de-b872-ac05e2ab8545\") " Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.245439 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2129b276-6537-40de-b872-ac05e2ab8545" (UID: "2129b276-6537-40de-b872-ac05e2ab8545"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.246636 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8" (OuterVolumeSpecName: "kube-api-access-8jdc8") pod "2129b276-6537-40de-b872-ac05e2ab8545" (UID: "2129b276-6537-40de-b872-ac05e2ab8545"). InnerVolumeSpecName "kube-api-access-8jdc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.317488 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config" (OuterVolumeSpecName: "config") pod "2129b276-6537-40de-b872-ac05e2ab8545" (UID: "2129b276-6537-40de-b872-ac05e2ab8545"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.347413 4875 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.347457 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jdc8\" (UniqueName: \"kubernetes.io/projected/2129b276-6537-40de-b872-ac05e2ab8545-kube-api-access-8jdc8\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.347470 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.366217 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2129b276-6537-40de-b872-ac05e2ab8545" (UID: "2129b276-6537-40de-b872-ac05e2ab8545"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.378570 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2129b276-6537-40de-b872-ac05e2ab8545" (UID: "2129b276-6537-40de-b872-ac05e2ab8545"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.450108 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.450165 4875 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2129b276-6537-40de-b872-ac05e2ab8545-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.573648 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" event={"ID":"3f6edd8b-6c78-4ab9-9cbc-49b66303467d","Type":"ContainerStarted","Data":"b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52"} Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.584561 4875 generic.go:334] "Generic (PLEG): container finished" podID="2129b276-6537-40de-b872-ac05e2ab8545" containerID="ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e" exitCode=0 Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.584758 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7877fc45f6-htlwc" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.584932 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerDied","Data":"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e"} Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.585015 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7877fc45f6-htlwc" event={"ID":"2129b276-6537-40de-b872-ac05e2ab8545","Type":"ContainerDied","Data":"1551e682f2409479b3b6a1be5bdb3aa79810776167e8f0e81ceae50848229b5a"} Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.585045 4875 scope.go:117] "RemoveContainer" containerID="501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.587688 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerStarted","Data":"ce9aaedc6273f3fa2665700ce33115fb2ef8d04b2a1b9f22bddacd8117ea1c89"} Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.587724 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerStarted","Data":"a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc"} Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.623871 4875 scope.go:117] "RemoveContainer" containerID="ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.654469 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.670847 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7877fc45f6-htlwc"] Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.674062 4875 scope.go:117] "RemoveContainer" containerID="501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee" Oct 07 08:14:55 crc kubenswrapper[4875]: E1007 08:14:55.674758 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee\": container with ID starting with 501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee not found: ID does not exist" containerID="501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.674809 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee"} err="failed to get container status \"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee\": rpc error: code = NotFound desc = could not find container \"501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee\": container with ID starting with 501dc74d741d9300fa6e4807d5c33c2ec669f39513ddf518b0053b24ce213cee not found: ID does not exist" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.674842 4875 scope.go:117] "RemoveContainer" containerID="ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e" Oct 07 08:14:55 crc kubenswrapper[4875]: E1007 08:14:55.675261 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e\": container with ID starting with ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e not found: ID does not exist" containerID="ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.675291 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e"} err="failed to get container status \"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e\": rpc error: code = NotFound desc = could not find container \"ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e\": container with ID starting with ff09e4f768783f0fed861e45b6062891be0bc258db1014051f91d46dcf9a004e not found: ID does not exist" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.732996 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2129b276-6537-40de-b872-ac05e2ab8545" path="/var/lib/kubelet/pods/2129b276-6537-40de-b872-ac05e2ab8545/volumes" Oct 07 08:14:55 crc kubenswrapper[4875]: I1007 08:14:55.733871 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" path="/var/lib/kubelet/pods/35b9d27a-06f2-4b00-917f-f078fdf1b1c2/volumes" Oct 07 08:14:56 crc kubenswrapper[4875]: I1007 08:14:56.605139 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerStarted","Data":"76b93e0b875b2d2d0f74dc9a6fa5c4aac8377a5347a1cc8996cb5420e6472bd4"} Oct 07 08:14:57 crc kubenswrapper[4875]: I1007 08:14:57.621824 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerStarted","Data":"63e938d159ea8e0c387d1593fc50a516bccb3e704f9694e1e8a2ba237ecf73d1"} Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.423459 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1ccd-account-create-8gkv6"] Oct 07 08:14:58 crc kubenswrapper[4875]: E1007 08:14:58.424365 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424386 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" Oct 07 08:14:58 crc kubenswrapper[4875]: E1007 08:14:58.424402 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-api" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424410 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-api" Oct 07 08:14:58 crc kubenswrapper[4875]: E1007 08:14:58.424438 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon-log" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424448 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon-log" Oct 07 08:14:58 crc kubenswrapper[4875]: E1007 08:14:58.424464 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-httpd" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 
08:14:58.424473 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-httpd" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424689 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424711 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-httpd" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424722 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2129b276-6537-40de-b872-ac05e2ab8545" containerName="neutron-api" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.424748 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b9d27a-06f2-4b00-917f-f078fdf1b1c2" containerName="horizon-log" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.425501 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.428816 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.435052 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1ccd-account-create-8gkv6"] Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.435309 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4v46\" (UniqueName: \"kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46\") pod \"nova-cell1-1ccd-account-create-8gkv6\" (UID: \"4861a958-1085-4ebf-a62a-8566468cbdad\") " pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.538584 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4v46\" (UniqueName: \"kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46\") pod \"nova-cell1-1ccd-account-create-8gkv6\" (UID: \"4861a958-1085-4ebf-a62a-8566468cbdad\") " pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.558932 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4v46\" (UniqueName: \"kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46\") pod \"nova-cell1-1ccd-account-create-8gkv6\" (UID: \"4861a958-1085-4ebf-a62a-8566468cbdad\") " pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:14:58 crc kubenswrapper[4875]: I1007 08:14:58.746580 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.154786 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx"] Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.156966 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.160126 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.160937 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.180321 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx"] Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.193943 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpxnh\" (UniqueName: \"kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.194197 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.194494 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.296577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.296667 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpxnh\" (UniqueName: \"kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.296723 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.297810 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume\") pod 
\"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.320917 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.328745 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpxnh\" (UniqueName: \"kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh\") pod \"collect-profiles-29330415-pdssx\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:00 crc kubenswrapper[4875]: I1007 08:15:00.500962 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.221168 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.221616 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.221684 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.222815 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.222903 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427" gracePeriod=600 Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.675330 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427" exitCode=0 Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.675403 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" 
event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427"} Oct 07 08:15:01 crc kubenswrapper[4875]: I1007 08:15:01.675511 4875 scope.go:117] "RemoveContainer" containerID="74477d8e6df862c07a1243437b7c34a1f70af65519c0c2ffe0b07caf6d4382f4" Oct 07 08:15:02 crc kubenswrapper[4875]: I1007 08:15:02.243020 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.056763 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1ccd-account-create-8gkv6"] Oct 07 08:15:04 crc kubenswrapper[4875]: W1007 08:15:04.067166 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4efbc99_08f2_418f_a6e0_d7b17f655810.slice/crio-f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47 WatchSource:0}: Error finding container f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47: Status 404 returned error can't find the container with id f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.095747 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx"] Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.711386 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerStarted","Data":"dd78c9155d1b48b3d3ebbfc6e7bbcdc623c33202681dc7ceb4d56d158ee1623d"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.712304 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.711565 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="sg-core" containerID="cri-o://63e938d159ea8e0c387d1593fc50a516bccb3e704f9694e1e8a2ba237ecf73d1" gracePeriod=30 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.711627 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="proxy-httpd" containerID="cri-o://dd78c9155d1b48b3d3ebbfc6e7bbcdc623c33202681dc7ceb4d56d158ee1623d" gracePeriod=30 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.711621 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-notification-agent" containerID="cri-o://76b93e0b875b2d2d0f74dc9a6fa5c4aac8377a5347a1cc8996cb5420e6472bd4" gracePeriod=30 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.711468 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-central-agent" containerID="cri-o://ce9aaedc6273f3fa2665700ce33115fb2ef8d04b2a1b9f22bddacd8117ea1c89" gracePeriod=30 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.722935 4875 generic.go:334] "Generic (PLEG): container finished" podID="4861a958-1085-4ebf-a62a-8566468cbdad" containerID="27e74e6769fb1785312527ebfca9300df57c740afc9ed08b65ad7431632db5ed" exitCode=0 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 
08:15:04.723288 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" event={"ID":"4861a958-1085-4ebf-a62a-8566468cbdad","Type":"ContainerDied","Data":"27e74e6769fb1785312527ebfca9300df57c740afc9ed08b65ad7431632db5ed"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.723362 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" event={"ID":"4861a958-1085-4ebf-a62a-8566468cbdad","Type":"ContainerStarted","Data":"4c6d6a1e50b6a25a4d10dcf54977a56607dd936198b78d8c661f0d3d31916ef1"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.738348 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.746803 4875 generic.go:334] "Generic (PLEG): container finished" podID="d4efbc99-08f2-418f-a6e0-d7b17f655810" containerID="fc184331fd467ad772a2a8313e1c0c81b3662f1178cef81c72e8eb6c46b7993e" exitCode=0 Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.746970 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" event={"ID":"d4efbc99-08f2-418f-a6e0-d7b17f655810","Type":"ContainerDied","Data":"fc184331fd467ad772a2a8313e1c0c81b3662f1178cef81c72e8eb6c46b7993e"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.747005 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" event={"ID":"d4efbc99-08f2-418f-a6e0-d7b17f655810","Type":"ContainerStarted","Data":"f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47"} Oct 07 08:15:04 crc kubenswrapper[4875]: E1007 08:15:04.753919 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4861a958_1085_4ebf_a62a_8566468cbdad.slice/crio-27e74e6769fb1785312527ebfca9300df57c740afc9ed08b65ad7431632db5ed.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4861a958_1085_4ebf_a62a_8566468cbdad.slice/crio-conmon-27e74e6769fb1785312527ebfca9300df57c740afc9ed08b65ad7431632db5ed.scope\": RecentStats: unable to find data in memory cache]" Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.756834 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" event={"ID":"3f6edd8b-6c78-4ab9-9cbc-49b66303467d","Type":"ContainerStarted","Data":"251b6fba44dd7d45dde7ca92d7e21074816028b46c47c0cd064b7c7e3a921777"} Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.757201 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.041673974 podStartE2EDuration="11.757183318s" podCreationTimestamp="2025-10-07 08:14:53 +0000 UTC" firstStartedPulling="2025-10-07 08:14:54.799193137 +0000 UTC m=+1119.758963680" lastFinishedPulling="2025-10-07 08:15:03.514702481 +0000 UTC m=+1128.474473024" observedRunningTime="2025-10-07 08:15:04.733378758 +0000 UTC m=+1129.693149311" watchObservedRunningTime="2025-10-07 08:15:04.757183318 +0000 UTC m=+1129.716953861" Oct 07 08:15:04 crc kubenswrapper[4875]: I1007 08:15:04.809463 4875 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" podStartSLOduration=3.131198177 podStartE2EDuration="11.809441229s" podCreationTimestamp="2025-10-07 08:14:53 +0000 UTC" firstStartedPulling="2025-10-07 08:14:54.834377491 +0000 UTC m=+1119.794148034" lastFinishedPulling="2025-10-07 08:15:03.512620543 +0000 UTC m=+1128.472391086" observedRunningTime="2025-10-07 08:15:04.798207581 +0000 UTC m=+1129.757978144" watchObservedRunningTime="2025-10-07 08:15:04.809441229 +0000 UTC m=+1129.769211772" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.774840 4875 generic.go:334] "Generic (PLEG): container finished" podID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerID="dd78c9155d1b48b3d3ebbfc6e7bbcdc623c33202681dc7ceb4d56d158ee1623d" exitCode=0 Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.775684 4875 generic.go:334] "Generic (PLEG): container finished" podID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerID="63e938d159ea8e0c387d1593fc50a516bccb3e704f9694e1e8a2ba237ecf73d1" exitCode=2 Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.775696 4875 generic.go:334] "Generic (PLEG): container finished" podID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerID="76b93e0b875b2d2d0f74dc9a6fa5c4aac8377a5347a1cc8996cb5420e6472bd4" exitCode=0 Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.775706 4875 generic.go:334] "Generic (PLEG): container finished" podID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerID="ce9aaedc6273f3fa2665700ce33115fb2ef8d04b2a1b9f22bddacd8117ea1c89" exitCode=0 Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776123 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerDied","Data":"dd78c9155d1b48b3d3ebbfc6e7bbcdc623c33202681dc7ceb4d56d158ee1623d"} Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776194 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerDied","Data":"63e938d159ea8e0c387d1593fc50a516bccb3e704f9694e1e8a2ba237ecf73d1"} Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776206 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerDied","Data":"76b93e0b875b2d2d0f74dc9a6fa5c4aac8377a5347a1cc8996cb5420e6472bd4"} Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776216 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerDied","Data":"ce9aaedc6273f3fa2665700ce33115fb2ef8d04b2a1b9f22bddacd8117ea1c89"} Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776228 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad","Type":"ContainerDied","Data":"a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc"} Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.776241 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2ed1f10c9c2814ae4aa0998f36e645252867d403c20971dd3bb20a760baf3bc" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.856060 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930115 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930387 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930448 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930594 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930631 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.930683 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58nwh\" (UniqueName: \"kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh\") pod \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\" (UID: \"6d0b6a9b-b0f6-4a35-8864-c1ee479584ad\") " Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.931140 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.931206 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.931799 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.931821 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.941934 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh" (OuterVolumeSpecName: "kube-api-access-58nwh") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "kube-api-access-58nwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.952270 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts" (OuterVolumeSpecName: "scripts") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:05 crc kubenswrapper[4875]: I1007 08:15:05.984485 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.033086 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.033119 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58nwh\" (UniqueName: \"kubernetes.io/projected/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-kube-api-access-58nwh\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.033131 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.060030 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.134101 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4v46\" (UniqueName: \"kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46\") pod \"4861a958-1085-4ebf-a62a-8566468cbdad\" (UID: \"4861a958-1085-4ebf-a62a-8566468cbdad\") " Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.135786 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.140285 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46" (OuterVolumeSpecName: "kube-api-access-p4v46") pod "4861a958-1085-4ebf-a62a-8566468cbdad" (UID: "4861a958-1085-4ebf-a62a-8566468cbdad"). InnerVolumeSpecName "kube-api-access-p4v46". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.157123 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data" (OuterVolumeSpecName: "config-data") pod "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" (UID: "6d0b6a9b-b0f6-4a35-8864-c1ee479584ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.236946 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4v46\" (UniqueName: \"kubernetes.io/projected/4861a958-1085-4ebf-a62a-8566468cbdad-kube-api-access-p4v46\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.237001 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.237016 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.251530 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.338073 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume\") pod \"d4efbc99-08f2-418f-a6e0-d7b17f655810\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.338147 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpxnh\" (UniqueName: \"kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh\") pod \"d4efbc99-08f2-418f-a6e0-d7b17f655810\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.338547 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume\") pod \"d4efbc99-08f2-418f-a6e0-d7b17f655810\" (UID: \"d4efbc99-08f2-418f-a6e0-d7b17f655810\") " Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.339418 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume" (OuterVolumeSpecName: "config-volume") pod "d4efbc99-08f2-418f-a6e0-d7b17f655810" (UID: "d4efbc99-08f2-418f-a6e0-d7b17f655810"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.342449 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d4efbc99-08f2-418f-a6e0-d7b17f655810" (UID: "d4efbc99-08f2-418f-a6e0-d7b17f655810"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.343235 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh" (OuterVolumeSpecName: "kube-api-access-bpxnh") pod "d4efbc99-08f2-418f-a6e0-d7b17f655810" (UID: "d4efbc99-08f2-418f-a6e0-d7b17f655810"). InnerVolumeSpecName "kube-api-access-bpxnh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.442549 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4efbc99-08f2-418f-a6e0-d7b17f655810-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.442596 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpxnh\" (UniqueName: \"kubernetes.io/projected/d4efbc99-08f2-418f-a6e0-d7b17f655810-kube-api-access-bpxnh\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.442607 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4efbc99-08f2-418f-a6e0-d7b17f655810-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.786969 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" event={"ID":"4861a958-1085-4ebf-a62a-8566468cbdad","Type":"ContainerDied","Data":"4c6d6a1e50b6a25a4d10dcf54977a56607dd936198b78d8c661f0d3d31916ef1"} Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.787019 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c6d6a1e50b6a25a4d10dcf54977a56607dd936198b78d8c661f0d3d31916ef1" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.787112 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1ccd-account-create-8gkv6" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.802997 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.803431 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" event={"ID":"d4efbc99-08f2-418f-a6e0-d7b17f655810","Type":"ContainerDied","Data":"f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47"} Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.803544 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3cab6c8d3d5c03b2466563aaa990093d699eb08be3a6acf1f57efc9c190fb47" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.803057 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.851344 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.863250 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891113 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891527 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="proxy-httpd" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891548 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="proxy-httpd" Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891590 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4efbc99-08f2-418f-a6e0-d7b17f655810" containerName="collect-profiles" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891599 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4efbc99-08f2-418f-a6e0-d7b17f655810" containerName="collect-profiles" Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891617 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-notification-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891625 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-notification-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891637 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-central-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891645 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-central-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891653 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="sg-core" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891659 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="sg-core" Oct 07 08:15:06 crc kubenswrapper[4875]: E1007 08:15:06.891671 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4861a958-1085-4ebf-a62a-8566468cbdad" containerName="mariadb-account-create" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891678 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="4861a958-1085-4ebf-a62a-8566468cbdad" containerName="mariadb-account-create" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891870 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="4861a958-1085-4ebf-a62a-8566468cbdad" containerName="mariadb-account-create" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891913 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="sg-core" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891925 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" 
containerName="ceilometer-central-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891937 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="ceilometer-notification-agent" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891950 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" containerName="proxy-httpd" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.891960 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4efbc99-08f2-418f-a6e0-d7b17f655810" containerName="collect-profiles" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.894416 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.896666 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.896763 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.908364 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.955292 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.955352 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.955608 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.955913 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxdbt\" (UniqueName: \"kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.956260 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 crc kubenswrapper[4875]: I1007 08:15:06.956368 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:06 
crc kubenswrapper[4875]: I1007 08:15:06.956448 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059326 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxdbt\" (UniqueName: \"kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059481 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059527 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059557 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059644 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059681 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.059746 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.060349 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.060393 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc 
kubenswrapper[4875]: I1007 08:15:07.067102 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.068432 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.073149 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.073839 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.084737 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxdbt\" (UniqueName: \"kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt\") pod \"ceilometer-0\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.215563 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:07 crc kubenswrapper[4875]: W1007 08:15:07.716192 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4968869_ac3f_4633_90f1_fc6693f3bb9c.slice/crio-65d3bcbf066697b4db7f058bb1fa6101954726060b15e5daba2a70b54cec87bf WatchSource:0}: Error finding container 65d3bcbf066697b4db7f058bb1fa6101954726060b15e5daba2a70b54cec87bf: Status 404 returned error can't find the container with id 65d3bcbf066697b4db7f058bb1fa6101954726060b15e5daba2a70b54cec87bf Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.721229 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d0b6a9b-b0f6-4a35-8864-c1ee479584ad" path="/var/lib/kubelet/pods/6d0b6a9b-b0f6-4a35-8864-c1ee479584ad/volumes" Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.723789 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:07 crc kubenswrapper[4875]: I1007 08:15:07.817654 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerStarted","Data":"65d3bcbf066697b4db7f058bb1fa6101954726060b15e5daba2a70b54cec87bf"} Oct 07 08:15:08 crc kubenswrapper[4875]: I1007 08:15:08.833983 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerStarted","Data":"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1"} Oct 07 08:15:09 crc kubenswrapper[4875]: I1007 08:15:09.844435 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerStarted","Data":"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246"} Oct 07 08:15:10 crc kubenswrapper[4875]: I1007 08:15:10.858769 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerStarted","Data":"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341"} Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.873602 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerStarted","Data":"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7"} Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.874184 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.903208 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.903933 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.236954314 podStartE2EDuration="5.903907883s" podCreationTimestamp="2025-10-07 08:15:06 +0000 UTC" firstStartedPulling="2025-10-07 08:15:07.719605905 +0000 UTC m=+1132.679376448" lastFinishedPulling="2025-10-07 08:15:11.386559454 +0000 UTC m=+1136.346330017" observedRunningTime="2025-10-07 08:15:11.902014782 +0000 UTC m=+1136.861785345" watchObservedRunningTime="2025-10-07 08:15:11.903907883 +0000 UTC m=+1136.863678446" Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.904106 4875 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/glance-default-external-api-0" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-httpd" containerID="cri-o://ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8" gracePeriod=30 Oct 07 08:15:11 crc kubenswrapper[4875]: I1007 08:15:11.904154 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-log" containerID="cri-o://a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5" gracePeriod=30 Oct 07 08:15:12 crc kubenswrapper[4875]: I1007 08:15:12.888525 4875 generic.go:334] "Generic (PLEG): container finished" podID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerID="a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5" exitCode=143 Oct 07 08:15:12 crc kubenswrapper[4875]: I1007 08:15:12.888633 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerDied","Data":"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5"} Oct 07 08:15:14 crc kubenswrapper[4875]: I1007 08:15:14.153576 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:14 crc kubenswrapper[4875]: I1007 08:15:14.154146 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-httpd" containerID="cri-o://9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da" gracePeriod=30 Oct 07 08:15:14 crc kubenswrapper[4875]: I1007 08:15:14.154481 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-log" containerID="cri-o://464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848" gracePeriod=30 Oct 07 08:15:14 crc kubenswrapper[4875]: I1007 08:15:14.921183 4875 generic.go:334] "Generic (PLEG): container finished" podID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerID="464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848" exitCode=143 Oct 07 08:15:14 crc kubenswrapper[4875]: I1007 08:15:14.921715 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerDied","Data":"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.060775 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.061223 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-central-agent" containerID="cri-o://79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1" gracePeriod=30 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.062037 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="proxy-httpd" containerID="cri-o://d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7" gracePeriod=30 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.062125 4875 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="sg-core" containerID="cri-o://2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341" gracePeriod=30 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.062189 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-notification-agent" containerID="cri-o://b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246" gracePeriod=30 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.661466 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750476 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750528 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750564 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750707 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750732 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttnwr\" (UniqueName: \"kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750827 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: \"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.750847 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts\") pod \"9b9f241f-e30b-416b-8a42-666f2fc72a79\" (UID: 
\"9b9f241f-e30b-416b-8a42-666f2fc72a79\") " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.751834 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs" (OuterVolumeSpecName: "logs") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.769821 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr" (OuterVolumeSpecName: "kube-api-access-ttnwr") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "kube-api-access-ttnwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.771614 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.777252 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts" (OuterVolumeSpecName: "scripts") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.823571 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.853738 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttnwr\" (UniqueName: \"kubernetes.io/projected/9b9f241f-e30b-416b-8a42-666f2fc72a79-kube-api-access-ttnwr\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.853782 4875 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.853793 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.853829 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.853840 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9f241f-e30b-416b-8a42-666f2fc72a79-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.865388 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data" (OuterVolumeSpecName: "config-data") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.881438 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.886563 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9b9f241f-e30b-416b-8a42-666f2fc72a79" (UID: "9b9f241f-e30b-416b-8a42-666f2fc72a79"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.906044 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.936128 4875 generic.go:334] "Generic (PLEG): container finished" podID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerID="ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8" exitCode=0 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.936217 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerDied","Data":"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.936254 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b9f241f-e30b-416b-8a42-666f2fc72a79","Type":"ContainerDied","Data":"e2d678457fa6e068a8dad5a69c489816e1afb7db6be2d66b5dc02afd3c1cfbcc"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.936277 4875 scope.go:117] "RemoveContainer" containerID="ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.936478 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.942857 4875 generic.go:334] "Generic (PLEG): container finished" podID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerID="d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7" exitCode=0 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.943446 4875 generic.go:334] "Generic (PLEG): container finished" podID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerID="2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341" exitCode=2 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.943459 4875 generic.go:334] "Generic (PLEG): container finished" podID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerID="b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246" exitCode=0 Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.943058 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerDied","Data":"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.943539 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerDied","Data":"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.943553 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerDied","Data":"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246"} Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.955172 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.955206 4875 
reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.955218 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.955230 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9f241f-e30b-416b-8a42-666f2fc72a79-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.968129 4875 scope.go:117] "RemoveContainer" containerID="a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5" Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.990784 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:15 crc kubenswrapper[4875]: I1007 08:15:15.999504 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.004313 4875 scope.go:117] "RemoveContainer" containerID="ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8" Oct 07 08:15:16 crc kubenswrapper[4875]: E1007 08:15:16.004765 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8\": container with ID starting with ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8 not found: ID does not exist" containerID="ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.004795 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8"} err="failed to get container status \"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8\": rpc error: code = NotFound desc = could not find container \"ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8\": container with ID starting with ee0762175ec002306406846fa61e8ec3c525abc7f2c9f45265e8498ebc9ad8e8 not found: ID does not exist" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.004815 4875 scope.go:117] "RemoveContainer" containerID="a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5" Oct 07 08:15:16 crc kubenswrapper[4875]: E1007 08:15:16.005078 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5\": container with ID starting with a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5 not found: ID does not exist" containerID="a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.005108 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5"} err="failed to get container status \"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5\": rpc error: code = NotFound desc = could not find container 
\"a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5\": container with ID starting with a226cfeffaff8aef7ebf6ff95b2220c436fff77223cda6da6581a118b7a391e5 not found: ID does not exist" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.022512 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:16 crc kubenswrapper[4875]: E1007 08:15:16.022941 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-httpd" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.022966 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-httpd" Oct 07 08:15:16 crc kubenswrapper[4875]: E1007 08:15:16.022986 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-log" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.022993 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-log" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.023164 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-httpd" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.023184 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" containerName="glance-log" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.030555 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.033512 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.033541 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.038176 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158599 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158680 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158729 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158777 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158798 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd5ng\" (UniqueName: \"kubernetes.io/projected/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-kube-api-access-bd5ng\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.158921 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-logs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.159121 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.159224 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260719 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd5ng\" (UniqueName: \"kubernetes.io/projected/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-kube-api-access-bd5ng\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260796 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-logs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260834 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260863 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.260955 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.261003 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.261069 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.261262 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.261437 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.261502 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-logs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.270607 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.270773 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.271360 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.274187 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.287012 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd5ng\" (UniqueName: \"kubernetes.io/projected/9d4dbc12-0c00-4b2a-ad57-055d19cebf0a-kube-api-access-bd5ng\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.298323 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a\") " pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.361318 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.816151 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.962891 4875 generic.go:334] "Generic (PLEG): container finished" podID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerID="79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1" exitCode=0 Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.962997 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerDied","Data":"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1"} Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.963024 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4968869-ac3f-4633-90f1-fc6693f3bb9c","Type":"ContainerDied","Data":"65d3bcbf066697b4db7f058bb1fa6101954726060b15e5daba2a70b54cec87bf"} Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.963058 4875 scope.go:117] "RemoveContainer" containerID="d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.963221 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.968609 4875 generic.go:334] "Generic (PLEG): container finished" podID="3f6edd8b-6c78-4ab9-9cbc-49b66303467d" containerID="251b6fba44dd7d45dde7ca92d7e21074816028b46c47c0cd064b7c7e3a921777" exitCode=0 Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.968655 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" event={"ID":"3f6edd8b-6c78-4ab9-9cbc-49b66303467d","Type":"ContainerDied","Data":"251b6fba44dd7d45dde7ca92d7e21074816028b46c47c0cd064b7c7e3a921777"} Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.975314 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.976417 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.976855 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.977193 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.977322 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.977412 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.977507 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxdbt\" (UniqueName: \"kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.977639 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle\") pod \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\" (UID: \"d4968869-ac3f-4633-90f1-fc6693f3bb9c\") " Oct 07 08:15:16 crc kubenswrapper[4875]: 
I1007 08:15:16.978583 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.979739 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.979993 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4968869-ac3f-4633-90f1-fc6693f3bb9c-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.993133 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts" (OuterVolumeSpecName: "scripts") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:16 crc kubenswrapper[4875]: I1007 08:15:16.998293 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt" (OuterVolumeSpecName: "kube-api-access-wxdbt") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "kube-api-access-wxdbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.005853 4875 scope.go:117] "RemoveContainer" containerID="2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.032463 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.070340 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 08:15:17 crc kubenswrapper[4875]: W1007 08:15:17.073127 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d4dbc12_0c00_4b2a_ad57_055d19cebf0a.slice/crio-186a5895ef85a62007026709917d3ddf05b7a7d2be2efbb50009a04f8843bbbd WatchSource:0}: Error finding container 186a5895ef85a62007026709917d3ddf05b7a7d2be2efbb50009a04f8843bbbd: Status 404 returned error can't find the container with id 186a5895ef85a62007026709917d3ddf05b7a7d2be2efbb50009a04f8843bbbd Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.081285 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.081314 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.081324 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxdbt\" (UniqueName: \"kubernetes.io/projected/d4968869-ac3f-4633-90f1-fc6693f3bb9c-kube-api-access-wxdbt\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.083982 4875 scope.go:117] "RemoveContainer" containerID="b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.110181 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.115380 4875 scope.go:117] "RemoveContainer" containerID="79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.142510 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data" (OuterVolumeSpecName: "config-data") pod "d4968869-ac3f-4633-90f1-fc6693f3bb9c" (UID: "d4968869-ac3f-4633-90f1-fc6693f3bb9c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.146925 4875 scope.go:117] "RemoveContainer" containerID="d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.147605 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7\": container with ID starting with d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7 not found: ID does not exist" containerID="d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.147650 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7"} err="failed to get container status \"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7\": rpc error: code = NotFound desc = could not find container \"d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7\": container with ID starting with d62efcc86f780b5479f8486d0ea49d84fa3bc7c148295c5cae739a9b00cdc7f7 not found: ID does not exist" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.147679 4875 scope.go:117] "RemoveContainer" containerID="2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.148094 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341\": container with ID starting with 2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341 not found: ID does not exist" containerID="2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.148123 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341"} err="failed to get container status \"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341\": rpc error: code = NotFound desc = could not find container \"2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341\": container with ID starting with 2a8f508aadda7ae7a9c6b0a3d5b1b96b99a073a67849768188fdd27984ab5341 not found: ID does not exist" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.148142 4875 scope.go:117] "RemoveContainer" containerID="b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.148557 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246\": container with ID starting with b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246 not found: ID does not exist" containerID="b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.148665 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246"} err="failed to get container status \"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246\": rpc error: code = NotFound desc = could not 
find container \"b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246\": container with ID starting with b203b7b46c3a6530c1f6c377edb8e1ede0c43a982a428b4e0e7bf6472855c246 not found: ID does not exist" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.148746 4875 scope.go:117] "RemoveContainer" containerID="79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.149196 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1\": container with ID starting with 79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1 not found: ID does not exist" containerID="79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.149292 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1"} err="failed to get container status \"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1\": rpc error: code = NotFound desc = could not find container \"79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1\": container with ID starting with 79c4b37a19daebd87b9fdd58b70ee72208fbd418f48a1ba05c26f39375b43bc1 not found: ID does not exist" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.184599 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.184642 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4968869-ac3f-4633-90f1-fc6693f3bb9c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.321944 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.332502 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.345971 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.346362 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="proxy-httpd" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346381 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="proxy-httpd" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.346407 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="sg-core" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346417 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="sg-core" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.346428 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-notification-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346436 4875 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-notification-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: E1007 08:15:17.346471 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-central-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346480 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-central-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346847 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="sg-core" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346867 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-central-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346892 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="ceilometer-notification-agent" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.346932 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" containerName="proxy-httpd" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.349173 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.353358 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.353537 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.387316 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490632 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bfp7\" (UniqueName: \"kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490694 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490732 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490754 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490802 4875 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490825 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.490847 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593047 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bfp7\" (UniqueName: \"kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593117 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593174 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593201 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593253 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593286 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593320 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: 
I1007 08:15:17.593706 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.593782 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.604577 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.607634 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.614734 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.621315 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.621325 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bfp7\" (UniqueName: \"kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7\") pod \"ceilometer-0\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.721948 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b9f241f-e30b-416b-8a42-666f2fc72a79" path="/var/lib/kubelet/pods/9b9f241f-e30b-416b-8a42-666f2fc72a79/volumes" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.736733 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4968869-ac3f-4633-90f1-fc6693f3bb9c" path="/var/lib/kubelet/pods/d4968869-ac3f-4633-90f1-fc6693f3bb9c/volumes" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.799940 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.878070 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.996064 4875 generic.go:334] "Generic (PLEG): container finished" podID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerID="9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da" exitCode=0 Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.996703 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerDied","Data":"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da"} Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.996748 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"626af307-8e17-427d-a18f-70ad5e8cc62f","Type":"ContainerDied","Data":"119540c79b8f0f6afcc7298252d97062239e8fe2a352375b94a272c2d1f8be80"} Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.996772 4875 scope.go:117] "RemoveContainer" containerID="9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da" Oct 07 08:15:17 crc kubenswrapper[4875]: I1007 08:15:17.997457 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.001001 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a","Type":"ContainerStarted","Data":"41921d505180176f495c87c7cce295568c864fca40e7de4c265b2d290f603296"} Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.001051 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a","Type":"ContainerStarted","Data":"186a5895ef85a62007026709917d3ddf05b7a7d2be2efbb50009a04f8843bbbd"} Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010331 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010427 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010493 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010562 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bqk5\" (UniqueName: \"kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010622 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010719 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010755 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.010846 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data\") pod \"626af307-8e17-427d-a18f-70ad5e8cc62f\" (UID: \"626af307-8e17-427d-a18f-70ad5e8cc62f\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.013514 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.013835 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs" (OuterVolumeSpecName: "logs") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.016265 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5" (OuterVolumeSpecName: "kube-api-access-6bqk5") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "kube-api-access-6bqk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.016593 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.019659 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts" (OuterVolumeSpecName: "scripts") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.064397 4875 scope.go:117] "RemoveContainer" containerID="464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.074357 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.091510 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.098241 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data" (OuterVolumeSpecName: "config-data") pod "626af307-8e17-427d-a18f-70ad5e8cc62f" (UID: "626af307-8e17-427d-a18f-70ad5e8cc62f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117519 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117547 4875 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/626af307-8e17-427d-a18f-70ad5e8cc62f-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117559 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117568 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117576 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117600 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117609 4875 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/626af307-8e17-427d-a18f-70ad5e8cc62f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.117617 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bqk5\" (UniqueName: 
\"kubernetes.io/projected/626af307-8e17-427d-a18f-70ad5e8cc62f-kube-api-access-6bqk5\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.122453 4875 scope.go:117] "RemoveContainer" containerID="9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da" Oct 07 08:15:18 crc kubenswrapper[4875]: E1007 08:15:18.122918 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da\": container with ID starting with 9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da not found: ID does not exist" containerID="9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.122950 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da"} err="failed to get container status \"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da\": rpc error: code = NotFound desc = could not find container \"9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da\": container with ID starting with 9db3ed5127c7d7c091668dd5d498fbdc413bd8ff20bb88b82512985c882a85da not found: ID does not exist" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.122976 4875 scope.go:117] "RemoveContainer" containerID="464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848" Oct 07 08:15:18 crc kubenswrapper[4875]: E1007 08:15:18.123189 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848\": container with ID starting with 464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848 not found: ID does not exist" containerID="464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.123215 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848"} err="failed to get container status \"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848\": rpc error: code = NotFound desc = could not find container \"464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848\": container with ID starting with 464d53d8f3c0d9fdd8d6d00b578f9bfe9ea60a8cdc1d474d4094081662663848 not found: ID does not exist" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.145249 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.223233 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.343656 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.442849 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.458690 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.474294 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.524353 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:18 crc kubenswrapper[4875]: E1007 08:15:18.524917 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-httpd" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.524940 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-httpd" Oct 07 08:15:18 crc kubenswrapper[4875]: E1007 08:15:18.524957 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-log" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.524964 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-log" Oct 07 08:15:18 crc kubenswrapper[4875]: E1007 08:15:18.524980 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6edd8b-6c78-4ab9-9cbc-49b66303467d" containerName="nova-cell0-conductor-db-sync" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.524989 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6edd8b-6c78-4ab9-9cbc-49b66303467d" containerName="nova-cell0-conductor-db-sync" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.525197 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-httpd" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.525216 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-log" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.525246 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f6edd8b-6c78-4ab9-9cbc-49b66303467d" containerName="nova-cell0-conductor-db-sync" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.526296 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.527754 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle\") pod \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.527974 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts\") pod \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.528080 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data\") pod \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.528149 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzl7d\" (UniqueName: \"kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d\") pod \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\" (UID: \"3f6edd8b-6c78-4ab9-9cbc-49b66303467d\") " Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.529554 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.533362 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d" (OuterVolumeSpecName: "kube-api-access-kzl7d") pod "3f6edd8b-6c78-4ab9-9cbc-49b66303467d" (UID: "3f6edd8b-6c78-4ab9-9cbc-49b66303467d"). InnerVolumeSpecName "kube-api-access-kzl7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.533580 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts" (OuterVolumeSpecName: "scripts") pod "3f6edd8b-6c78-4ab9-9cbc-49b66303467d" (UID: "3f6edd8b-6c78-4ab9-9cbc-49b66303467d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.537736 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.552192 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.562608 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data" (OuterVolumeSpecName: "config-data") pod "3f6edd8b-6c78-4ab9-9cbc-49b66303467d" (UID: "3f6edd8b-6c78-4ab9-9cbc-49b66303467d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.565091 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f6edd8b-6c78-4ab9-9cbc-49b66303467d" (UID: "3f6edd8b-6c78-4ab9-9cbc-49b66303467d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630010 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnnk4\" (UniqueName: \"kubernetes.io/projected/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-kube-api-access-xnnk4\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630306 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630396 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-logs\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630533 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-config-data\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630610 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630757 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630840 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-scripts\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.630940 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.631293 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.631397 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.631413 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzl7d\" (UniqueName: \"kubernetes.io/projected/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-kube-api-access-kzl7d\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.631428 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6edd8b-6c78-4ab9-9cbc-49b66303467d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.735817 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.735940 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnnk4\" (UniqueName: \"kubernetes.io/projected/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-kube-api-access-xnnk4\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736067 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736101 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-logs\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736167 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-config-data\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736192 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736271 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.736307 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-scripts\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.737080 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.737132 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.737096 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-logs\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.745653 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.745946 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-scripts\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.754736 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.787899 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnnk4\" (UniqueName: \"kubernetes.io/projected/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-kube-api-access-xnnk4\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: 
I1007 08:15:18.795196 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95d6a4f6-2e39-4b75-aadf-ca829e1e9911-config-data\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.831696 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"95d6a4f6-2e39-4b75-aadf-ca829e1e9911\") " pod="openstack/glance-default-internal-api-0" Oct 07 08:15:18 crc kubenswrapper[4875]: I1007 08:15:18.957840 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.026969 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d4dbc12-0c00-4b2a-ad57-055d19cebf0a","Type":"ContainerStarted","Data":"0eee754a7ffec7f067ac60f2c1aa97ad110288056dda691ce94d08ebf925a4a9"} Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.030659 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" event={"ID":"3f6edd8b-6c78-4ab9-9cbc-49b66303467d","Type":"ContainerDied","Data":"b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52"} Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.030693 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b37b98c64677c27cd0d4d358c448c8bf4e6e0e46637bc521f96bf3ac12e24d52" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.030669 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8bg6r" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.040115 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerStarted","Data":"b4d57755d1af2f71a426321ea033cd2ab8cb80db1d1c86fcecdd5e1a3394c9e9"} Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.054505 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.054481084 podStartE2EDuration="4.054481084s" podCreationTimestamp="2025-10-07 08:15:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:19.0473198 +0000 UTC m=+1144.007090353" watchObservedRunningTime="2025-10-07 08:15:19.054481084 +0000 UTC m=+1144.014251627" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.089548 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.091433 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.094850 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mvvw7" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.095065 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.104247 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.150221 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plfm6\" (UniqueName: \"kubernetes.io/projected/98872838-7b40-4854-97c4-edddb9a931a5-kube-api-access-plfm6\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.150296 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.150412 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.252331 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.252454 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plfm6\" (UniqueName: \"kubernetes.io/projected/98872838-7b40-4854-97c4-edddb9a931a5-kube-api-access-plfm6\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.252484 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.258812 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.259343 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98872838-7b40-4854-97c4-edddb9a931a5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.274534 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plfm6\" (UniqueName: \"kubernetes.io/projected/98872838-7b40-4854-97c4-edddb9a931a5-kube-api-access-plfm6\") pod \"nova-cell0-conductor-0\" (UID: \"98872838-7b40-4854-97c4-edddb9a931a5\") " pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.416054 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.656662 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 08:15:19 crc kubenswrapper[4875]: W1007 08:15:19.668465 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95d6a4f6_2e39_4b75_aadf_ca829e1e9911.slice/crio-921eec3f98e8f260a797b4174a5b1dd99436347a2f4b485500546fca8d756bf4 WatchSource:0}: Error finding container 921eec3f98e8f260a797b4174a5b1dd99436347a2f4b485500546fca8d756bf4: Status 404 returned error can't find the container with id 921eec3f98e8f260a797b4174a5b1dd99436347a2f4b485500546fca8d756bf4 Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.711445 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" path="/var/lib/kubelet/pods/626af307-8e17-427d-a18f-70ad5e8cc62f/volumes" Oct 07 08:15:19 crc kubenswrapper[4875]: I1007 08:15:19.952494 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 08:15:20 crc kubenswrapper[4875]: I1007 08:15:20.055330 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"95d6a4f6-2e39-4b75-aadf-ca829e1e9911","Type":"ContainerStarted","Data":"921eec3f98e8f260a797b4174a5b1dd99436347a2f4b485500546fca8d756bf4"} Oct 07 08:15:20 crc kubenswrapper[4875]: I1007 08:15:20.060676 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerStarted","Data":"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593"} Oct 07 08:15:20 crc kubenswrapper[4875]: I1007 08:15:20.060722 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerStarted","Data":"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b"} Oct 07 08:15:20 crc kubenswrapper[4875]: I1007 08:15:20.062550 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"98872838-7b40-4854-97c4-edddb9a931a5","Type":"ContainerStarted","Data":"d25a8bb6fa74a02c63a0dbfa7c24e811eec080f06383ad16242b329d1180496a"} Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.075456 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"98872838-7b40-4854-97c4-edddb9a931a5","Type":"ContainerStarted","Data":"e7b0ba62d7fc8a421b32fce95a7ef03ff48066116cd2d3068f212da036904c41"} Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.075931 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.078950 4875 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"95d6a4f6-2e39-4b75-aadf-ca829e1e9911","Type":"ContainerStarted","Data":"52b46a9ebdc7dcaf402e1bcb8fe7aac18330f4bae30436a05750a6c364dce2ad"} Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.078985 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"95d6a4f6-2e39-4b75-aadf-ca829e1e9911","Type":"ContainerStarted","Data":"1c917df3e5c68c6bc21056338b88096f5ccfb5684cf862bf73e3637938f49641"} Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.083151 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerStarted","Data":"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97"} Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.105093 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.105072267 podStartE2EDuration="2.105072267s" podCreationTimestamp="2025-10-07 08:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:21.099135143 +0000 UTC m=+1146.058905686" watchObservedRunningTime="2025-10-07 08:15:21.105072267 +0000 UTC m=+1146.064842810" Oct 07 08:15:21 crc kubenswrapper[4875]: I1007 08:15:21.134495 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.13447166 podStartE2EDuration="3.13447166s" podCreationTimestamp="2025-10-07 08:15:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:21.128056971 +0000 UTC m=+1146.087827524" watchObservedRunningTime="2025-10-07 08:15:21.13447166 +0000 UTC m=+1146.094242203" Oct 07 08:15:22 crc kubenswrapper[4875]: I1007 08:15:22.101074 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerStarted","Data":"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf"} Oct 07 08:15:22 crc kubenswrapper[4875]: I1007 08:15:22.102132 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:15:22 crc kubenswrapper[4875]: I1007 08:15:22.121786 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.947763268 podStartE2EDuration="5.121771237s" podCreationTimestamp="2025-10-07 08:15:17 +0000 UTC" firstStartedPulling="2025-10-07 08:15:18.363354051 +0000 UTC m=+1143.323124604" lastFinishedPulling="2025-10-07 08:15:21.53736203 +0000 UTC m=+1146.497132573" observedRunningTime="2025-10-07 08:15:22.117641941 +0000 UTC m=+1147.077412484" watchObservedRunningTime="2025-10-07 08:15:22.121771237 +0000 UTC m=+1147.081541780" Oct 07 08:15:26 crc kubenswrapper[4875]: I1007 08:15:26.362525 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 08:15:26 crc kubenswrapper[4875]: I1007 08:15:26.363209 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 08:15:26 crc kubenswrapper[4875]: I1007 08:15:26.399811 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-external-api-0" Oct 07 08:15:26 crc kubenswrapper[4875]: I1007 08:15:26.416474 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 08:15:27 crc kubenswrapper[4875]: I1007 08:15:27.147571 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 08:15:27 crc kubenswrapper[4875]: I1007 08:15:27.147633 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 08:15:28 crc kubenswrapper[4875]: I1007 08:15:28.958886 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:28 crc kubenswrapper[4875]: I1007 08:15:28.959330 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.005136 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.015484 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.062954 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.065368 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.176636 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.176905 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.464870 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.969620 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-vdpxw"] Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.971394 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.975499 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.977592 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 07 08:15:29 crc kubenswrapper[4875]: I1007 08:15:29.986354 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vdpxw"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.069992 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.070040 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.070099 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztndc\" (UniqueName: \"kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.070160 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.119981 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.122044 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.138470 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.149297 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174384 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztndc\" (UniqueName: \"kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174514 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174604 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174649 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174674 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zsr9\" (UniqueName: \"kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174706 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174727 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.174758 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.186594 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.191549 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.204666 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.254137 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.256407 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.275076 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.276418 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.276481 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zsr9\" (UniqueName: \"kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.276508 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.276532 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.276956 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.311466 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 
07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.312594 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.328807 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.382969 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zsr9\" (UniqueName: \"kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9\") pod \"nova-api-0\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.400421 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztndc\" (UniqueName: \"kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc\") pod \"nova-cell0-cell-mapping-vdpxw\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.405300 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlp4r\" (UniqueName: \"kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.406478 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.406624 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.427932 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.451488 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.466236 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.468542 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.471962 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.509513 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlp4r\" (UniqueName: \"kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.510280 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.510359 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.510862 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.519647 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.521939 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.525836 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.528773 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.536224 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlp4r\" (UniqueName: \"kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.536666 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.561151 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.567088 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.573109 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.591908 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613064 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613131 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613214 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft86t\" (UniqueName: \"kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613291 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-678jm\" (UniqueName: \"kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613322 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnqr7\" (UniqueName: \"kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613419 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613437 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613690 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613735 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " 
pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613760 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613812 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613843 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.613888 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715116 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft86t\" (UniqueName: \"kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715602 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-678jm\" (UniqueName: \"kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715732 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnqr7\" (UniqueName: \"kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715802 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715826 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc 
kubenswrapper[4875]: I1007 08:15:30.715849 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715866 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715898 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715928 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715952 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715978 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.715997 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.716015 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.716917 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.722985 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.731824 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.732395 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.732566 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.732843 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.733488 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.734141 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.734376 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.734680 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.740911 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft86t\" (UniqueName: \"kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t\") pod \"dnsmasq-dns-bccf8f775-vp4dv\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.745763 
4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-678jm\" (UniqueName: \"kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm\") pod \"nova-scheduler-0\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.749133 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.754380 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnqr7\" (UniqueName: \"kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7\") pod \"nova-metadata-0\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.800430 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.856072 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:30 crc kubenswrapper[4875]: I1007 08:15:30.892272 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.059185 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vdpxw"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.143322 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:31 crc kubenswrapper[4875]: W1007 08:15:31.179683 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea3e6a51_61d8_4931_9a0b_6b49839a6f30.slice/crio-622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc WatchSource:0}: Error finding container 622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc: Status 404 returned error can't find the container with id 622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.260702 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerStarted","Data":"622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc"} Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.264390 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vdpxw" event={"ID":"31deb1ca-dafe-4134-bf0c-d8357153d8ed","Type":"ContainerStarted","Data":"42c5ec0b86f7788c33739596c0af0ed1c71ff0e88068581eadf03391c73075a6"} Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.431152 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.501037 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.514550 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cdldk"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.516246 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.521287 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.521948 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.528261 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cdldk"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.640562 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.640684 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.640729 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.641130 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bhd2\" (UniqueName: \"kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.647202 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:31 crc kubenswrapper[4875]: W1007 08:15:31.660660 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2184acaa_b7d5_43ac_ae72_aafb225f40fe.slice/crio-f49b98a849256fdc212a874a33a6ebee78cb3cbfdcb120cf336befcde29b2c3b WatchSource:0}: Error finding container f49b98a849256fdc212a874a33a6ebee78cb3cbfdcb120cf336befcde29b2c3b: Status 404 returned error can't find the container with id f49b98a849256fdc212a874a33a6ebee78cb3cbfdcb120cf336befcde29b2c3b Oct 07 08:15:31 crc kubenswrapper[4875]: W1007 08:15:31.692223 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd16f042f_caee_49b6_bd11_7d12f614dc57.slice/crio-3633d29d56861a4bd7972a3e63d9b6314735f3a687792cf193fb7263e431b7b9 WatchSource:0}: Error finding container 3633d29d56861a4bd7972a3e63d9b6314735f3a687792cf193fb7263e431b7b9: Status 404 returned error can't find the container with id 3633d29d56861a4bd7972a3e63d9b6314735f3a687792cf193fb7263e431b7b9 Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 
08:15:31.726071 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.748885 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.749153 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.749199 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.761324 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bhd2\" (UniqueName: \"kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.765389 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.765733 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.771522 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.797620 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bhd2\" (UniqueName: \"kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2\") pod \"nova-cell1-conductor-db-sync-cdldk\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.873827 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.922605 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.922866 4875 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 08:15:31 crc kubenswrapper[4875]: I1007 08:15:31.991433 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.244432 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cdldk"] Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.293642 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"68a6ccf0-417c-4432-80b8-a0a66b545a88","Type":"ContainerStarted","Data":"798bebf80660c92520ccbad4a4ae40c1e96cf110d5ef3f978c241601883bc0a6"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.318676 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vdpxw" event={"ID":"31deb1ca-dafe-4134-bf0c-d8357153d8ed","Type":"ContainerStarted","Data":"8a38f71ca8f0da6b4b2b8e740f3d07ee88fe560e47d3a66f9bb4a9eb24543eb7"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.347952 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2184acaa-b7d5-43ac-ae72-aafb225f40fe","Type":"ContainerStarted","Data":"f49b98a849256fdc212a874a33a6ebee78cb3cbfdcb120cf336befcde29b2c3b"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.349986 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-vdpxw" podStartSLOduration=3.3499615289999998 podStartE2EDuration="3.349961529s" podCreationTimestamp="2025-10-07 08:15:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:32.345230395 +0000 UTC m=+1157.305000958" watchObservedRunningTime="2025-10-07 08:15:32.349961529 +0000 UTC m=+1157.309732072" Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.360710 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerStarted","Data":"8216effbce1fb853ca5fb6dcef4cd2548292bc40463eeacab2707630b7451936"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.375057 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cdldk" event={"ID":"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b","Type":"ContainerStarted","Data":"c62a6325a6e82b4789966d169c392aa910253907d10d3da27b83170ae0887dfc"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.392184 4875 generic.go:334] "Generic (PLEG): container finished" podID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerID="2e4b7eafad22d04f1754a1a891cbaf3b46db13f16f7a502bbc1cf88083a7497d" exitCode=0 Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.392620 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" event={"ID":"d16f042f-caee-49b6-bd11-7d12f614dc57","Type":"ContainerDied","Data":"2e4b7eafad22d04f1754a1a891cbaf3b46db13f16f7a502bbc1cf88083a7497d"} Oct 07 08:15:32 crc kubenswrapper[4875]: I1007 08:15:32.392713 4875 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" event={"ID":"d16f042f-caee-49b6-bd11-7d12f614dc57","Type":"ContainerStarted","Data":"3633d29d56861a4bd7972a3e63d9b6314735f3a687792cf193fb7263e431b7b9"} Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.417845 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cdldk" event={"ID":"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b","Type":"ContainerStarted","Data":"83d348756739527f131afb9ecef42e53c72b8631d7429e118e7f7ce6fb7335cd"} Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.430158 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" event={"ID":"d16f042f-caee-49b6-bd11-7d12f614dc57","Type":"ContainerStarted","Data":"8a418f7f2c6ec470bdc4ee0427bd1dfd58b0c03de2be564965a8de51bf4ff150"} Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.430302 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.465674 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-cdldk" podStartSLOduration=2.465651093 podStartE2EDuration="2.465651093s" podCreationTimestamp="2025-10-07 08:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:33.454992203 +0000 UTC m=+1158.414762746" watchObservedRunningTime="2025-10-07 08:15:33.465651093 +0000 UTC m=+1158.425421636" Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.487211 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" podStartSLOduration=3.487194418 podStartE2EDuration="3.487194418s" podCreationTimestamp="2025-10-07 08:15:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:33.482225686 +0000 UTC m=+1158.441996229" watchObservedRunningTime="2025-10-07 08:15:33.487194418 +0000 UTC m=+1158.446964961" Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.844390 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:33 crc kubenswrapper[4875]: I1007 08:15:33.900818 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.476279 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerStarted","Data":"ef5c55196f486fcaf605c9a56b822a4250f48fd2dec2e0d1ca942077e9642376"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.476934 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerStarted","Data":"845f5361463692e00f406a302ba10885ef0daf84aac41c5c3b75abfd79f11b5c"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.481601 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerStarted","Data":"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.481642 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerStarted","Data":"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.481685 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-log" containerID="cri-o://77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" gracePeriod=30 Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.481733 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-metadata" containerID="cri-o://5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" gracePeriod=30 Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.496534 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"68a6ccf0-417c-4432-80b8-a0a66b545a88","Type":"ContainerStarted","Data":"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.496694 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="68a6ccf0-417c-4432-80b8-a0a66b545a88" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc" gracePeriod=30 Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.502037 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2184acaa-b7d5-43ac-ae72-aafb225f40fe","Type":"ContainerStarted","Data":"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9"} Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.512633 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.581266608 podStartE2EDuration="7.512611492s" podCreationTimestamp="2025-10-07 08:15:30 +0000 UTC" firstStartedPulling="2025-10-07 08:15:31.189572482 +0000 UTC m=+1156.149343025" lastFinishedPulling="2025-10-07 08:15:36.120917366 +0000 UTC m=+1161.080687909" observedRunningTime="2025-10-07 08:15:37.504523517 +0000 UTC m=+1162.464294060" watchObservedRunningTime="2025-10-07 08:15:37.512611492 +0000 UTC m=+1162.472382035" Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.535390 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.080424772 podStartE2EDuration="7.535363647s" podCreationTimestamp="2025-10-07 08:15:30 +0000 UTC" firstStartedPulling="2025-10-07 08:15:31.662719694 +0000 UTC m=+1156.622490227" lastFinishedPulling="2025-10-07 08:15:36.117658559 +0000 UTC m=+1161.077429102" observedRunningTime="2025-10-07 08:15:37.522747173 +0000 UTC m=+1162.482517736" watchObservedRunningTime="2025-10-07 08:15:37.535363647 +0000 UTC m=+1162.495134200" Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.541628 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.970429158 podStartE2EDuration="7.541601272s" podCreationTimestamp="2025-10-07 08:15:30 +0000 UTC" firstStartedPulling="2025-10-07 08:15:31.524795375 +0000 UTC m=+1156.484565918" lastFinishedPulling="2025-10-07 08:15:36.095967489 +0000 UTC m=+1161.055738032" 
observedRunningTime="2025-10-07 08:15:37.539122151 +0000 UTC m=+1162.498892714" watchObservedRunningTime="2025-10-07 08:15:37.541601272 +0000 UTC m=+1162.501371815" Oct 07 08:15:37 crc kubenswrapper[4875]: I1007 08:15:37.575206 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.029244775 podStartE2EDuration="7.575185982s" podCreationTimestamp="2025-10-07 08:15:30 +0000 UTC" firstStartedPulling="2025-10-07 08:15:31.550046922 +0000 UTC m=+1156.509817465" lastFinishedPulling="2025-10-07 08:15:36.095988129 +0000 UTC m=+1161.055758672" observedRunningTime="2025-10-07 08:15:37.563013823 +0000 UTC m=+1162.522784386" watchObservedRunningTime="2025-10-07 08:15:37.575185982 +0000 UTC m=+1162.534956525" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.166636 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.281298 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data\") pod \"556c4447-afdd-414a-b68b-48bb3bc285f1\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.281470 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnqr7\" (UniqueName: \"kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7\") pod \"556c4447-afdd-414a-b68b-48bb3bc285f1\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.281518 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs\") pod \"556c4447-afdd-414a-b68b-48bb3bc285f1\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.281541 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle\") pod \"556c4447-afdd-414a-b68b-48bb3bc285f1\" (UID: \"556c4447-afdd-414a-b68b-48bb3bc285f1\") " Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.282526 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs" (OuterVolumeSpecName: "logs") pod "556c4447-afdd-414a-b68b-48bb3bc285f1" (UID: "556c4447-afdd-414a-b68b-48bb3bc285f1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.288497 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7" (OuterVolumeSpecName: "kube-api-access-fnqr7") pod "556c4447-afdd-414a-b68b-48bb3bc285f1" (UID: "556c4447-afdd-414a-b68b-48bb3bc285f1"). InnerVolumeSpecName "kube-api-access-fnqr7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.314312 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "556c4447-afdd-414a-b68b-48bb3bc285f1" (UID: "556c4447-afdd-414a-b68b-48bb3bc285f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.328356 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data" (OuterVolumeSpecName: "config-data") pod "556c4447-afdd-414a-b68b-48bb3bc285f1" (UID: "556c4447-afdd-414a-b68b-48bb3bc285f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.385318 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.385380 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnqr7\" (UniqueName: \"kubernetes.io/projected/556c4447-afdd-414a-b68b-48bb3bc285f1-kube-api-access-fnqr7\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.385405 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/556c4447-afdd-414a-b68b-48bb3bc285f1-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.385420 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556c4447-afdd-414a-b68b-48bb3bc285f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515027 4875 generic.go:334] "Generic (PLEG): container finished" podID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerID="5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" exitCode=0 Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515076 4875 generic.go:334] "Generic (PLEG): container finished" podID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerID="77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" exitCode=143 Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515102 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerDied","Data":"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316"} Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515217 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerDied","Data":"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf"} Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515145 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515236 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"556c4447-afdd-414a-b68b-48bb3bc285f1","Type":"ContainerDied","Data":"8216effbce1fb853ca5fb6dcef4cd2548292bc40463eeacab2707630b7451936"} Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.515262 4875 scope.go:117] "RemoveContainer" containerID="5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.553245 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.564399 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.606034 4875 scope.go:117] "RemoveContainer" containerID="77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.607135 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:38 crc kubenswrapper[4875]: E1007 08:15:38.607816 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-log" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.607846 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-log" Oct 07 08:15:38 crc kubenswrapper[4875]: E1007 08:15:38.607898 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-metadata" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.607911 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-metadata" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.608246 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-log" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.608307 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" containerName="nova-metadata-metadata" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.609978 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.616534 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.618423 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.618967 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.655599 4875 scope.go:117] "RemoveContainer" containerID="5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" Oct 07 08:15:38 crc kubenswrapper[4875]: E1007 08:15:38.657004 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316\": container with ID starting with 5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316 not found: ID does not exist" containerID="5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.657053 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316"} err="failed to get container status \"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316\": rpc error: code = NotFound desc = could not find container \"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316\": container with ID starting with 5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316 not found: ID does not exist" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.657091 4875 scope.go:117] "RemoveContainer" containerID="77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" Oct 07 08:15:38 crc kubenswrapper[4875]: E1007 08:15:38.658177 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf\": container with ID starting with 77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf not found: ID does not exist" containerID="77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.658255 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf"} err="failed to get container status \"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf\": rpc error: code = NotFound desc = could not find container \"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf\": container with ID starting with 77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf not found: ID does not exist" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.658305 4875 scope.go:117] "RemoveContainer" containerID="5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.658902 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316"} err="failed to get container status \"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316\": rpc error: 
code = NotFound desc = could not find container \"5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316\": container with ID starting with 5fa70f198e8e14b0c8fd7a0ecf16c9753d8d5372f6f717c5a04633255bf54316 not found: ID does not exist" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.658935 4875 scope.go:117] "RemoveContainer" containerID="77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.659792 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf"} err="failed to get container status \"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf\": rpc error: code = NotFound desc = could not find container \"77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf\": container with ID starting with 77d9ce7da204aedd6a9ce3351a28ea16475140cf66fcbfec28a2ad5e72dd9fcf not found: ID does not exist" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.692152 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.692221 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gqft\" (UniqueName: \"kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.692276 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.692349 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.692426 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.794463 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.794589 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.794745 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.794789 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gqft\" (UniqueName: \"kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.794867 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.797753 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.801688 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.802067 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.811960 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.820577 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gqft\" (UniqueName: \"kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft\") pod \"nova-metadata-0\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " pod="openstack/nova-metadata-0" Oct 07 08:15:38 crc kubenswrapper[4875]: I1007 08:15:38.939653 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:39 crc kubenswrapper[4875]: I1007 08:15:39.431762 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:39 crc kubenswrapper[4875]: W1007 08:15:39.442268 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2915cba_5b63_4c46_8736_57aa976e2a67.slice/crio-9b72d12adf8b585079f4975cc33de821f8d6de0b4103758dac9c5bfa5b7a40d4 WatchSource:0}: Error finding container 9b72d12adf8b585079f4975cc33de821f8d6de0b4103758dac9c5bfa5b7a40d4: Status 404 returned error can't find the container with id 9b72d12adf8b585079f4975cc33de821f8d6de0b4103758dac9c5bfa5b7a40d4 Oct 07 08:15:39 crc kubenswrapper[4875]: I1007 08:15:39.527995 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerStarted","Data":"9b72d12adf8b585079f4975cc33de821f8d6de0b4103758dac9c5bfa5b7a40d4"} Oct 07 08:15:39 crc kubenswrapper[4875]: I1007 08:15:39.720524 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="556c4447-afdd-414a-b68b-48bb3bc285f1" path="/var/lib/kubelet/pods/556c4447-afdd-414a-b68b-48bb3bc285f1/volumes" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.467058 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.467511 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.542897 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerStarted","Data":"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de"} Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.542948 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerStarted","Data":"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8"} Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.546470 4875 generic.go:334] "Generic (PLEG): container finished" podID="31deb1ca-dafe-4134-bf0c-d8357153d8ed" containerID="8a38f71ca8f0da6b4b2b8e740f3d07ee88fe560e47d3a66f9bb4a9eb24543eb7" exitCode=0 Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.546538 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vdpxw" event={"ID":"31deb1ca-dafe-4134-bf0c-d8357153d8ed","Type":"ContainerDied","Data":"8a38f71ca8f0da6b4b2b8e740f3d07ee88fe560e47d3a66f9bb4a9eb24543eb7"} Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.574754 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.574729734 podStartE2EDuration="2.574729734s" podCreationTimestamp="2025-10-07 08:15:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:40.560793748 +0000 UTC m=+1165.520564291" watchObservedRunningTime="2025-10-07 08:15:40.574729734 +0000 UTC m=+1165.534500277" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.749985 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:15:40 crc 
kubenswrapper[4875]: I1007 08:15:40.856543 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.856597 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.891182 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 08:15:40 crc kubenswrapper[4875]: I1007 08:15:40.895169 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.012094 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.021445 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="dnsmasq-dns" containerID="cri-o://897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95" gracePeriod=10 Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.549340 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.549347 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.573326 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.574061 4875 generic.go:334] "Generic (PLEG): container finished" podID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerID="897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95" exitCode=0 Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.574133 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" event={"ID":"b243a457-ccd1-4c3d-b798-9924c6dfcf45","Type":"ContainerDied","Data":"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95"} Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.574164 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" event={"ID":"b243a457-ccd1-4c3d-b798-9924c6dfcf45","Type":"ContainerDied","Data":"7ae4c7c744128a5e7f889669c93a7c0b54e55194d559793a03e441b2cc68abed"} Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.574186 4875 scope.go:117] "RemoveContainer" containerID="897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.577222 4875 generic.go:334] "Generic (PLEG): container finished" podID="bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" containerID="83d348756739527f131afb9ecef42e53c72b8631d7429e118e7f7ce6fb7335cd" exitCode=0 Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.578121 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cdldk" event={"ID":"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b","Type":"ContainerDied","Data":"83d348756739527f131afb9ecef42e53c72b8631d7429e118e7f7ce6fb7335cd"} Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.610026 4875 scope.go:117] "RemoveContainer" containerID="210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.656590 4875 scope.go:117] "RemoveContainer" containerID="897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95" Oct 07 08:15:41 crc kubenswrapper[4875]: E1007 08:15:41.660381 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95\": container with ID starting with 897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95 not found: ID does not exist" containerID="897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.660470 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95"} err="failed to get container status \"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95\": rpc error: code = NotFound desc = could not find container \"897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95\": container with ID starting with 897fb7517eb2debc16b8c39a152333b706dd69e4dfabbeb58ea46b519c6cea95 not found: ID does not exist" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.660543 4875 scope.go:117] "RemoveContainer" containerID="210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb" Oct 07 08:15:41 crc kubenswrapper[4875]: E1007 08:15:41.661471 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb\": container with ID starting with 210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb not found: ID does not exist" containerID="210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.661550 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb"} err="failed to get container status \"210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb\": rpc error: code = NotFound desc = could not find container \"210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb\": container with ID starting with 210bff39d1d222ec1fcdd2b15e45b62612aed51d11fbf4892bdf0fb6bdd8c7cb not found: ID does not exist" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.663676 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.663839 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.663862 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.663975 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdcwn\" (UniqueName: \"kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.664018 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.664106 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config\") pod \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\" (UID: \"b243a457-ccd1-4c3d-b798-9924c6dfcf45\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.666737 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.677343 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn" (OuterVolumeSpecName: "kube-api-access-jdcwn") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). 
InnerVolumeSpecName "kube-api-access-jdcwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.768648 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdcwn\" (UniqueName: \"kubernetes.io/projected/b243a457-ccd1-4c3d-b798-9924c6dfcf45-kube-api-access-jdcwn\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.783769 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.783839 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.789747 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.813304 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config" (OuterVolumeSpecName: "config") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.813948 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b243a457-ccd1-4c3d-b798-9924c6dfcf45" (UID: "b243a457-ccd1-4c3d-b798-9924c6dfcf45"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.870378 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.871039 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.871053 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.871068 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.871081 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b243a457-ccd1-4c3d-b798-9924c6dfcf45-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.943630 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.973093 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data\") pod \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.973299 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztndc\" (UniqueName: \"kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc\") pod \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.973436 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle\") pod \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.973468 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts\") pod \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\" (UID: \"31deb1ca-dafe-4134-bf0c-d8357153d8ed\") " Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.978573 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts" (OuterVolumeSpecName: "scripts") pod "31deb1ca-dafe-4134-bf0c-d8357153d8ed" (UID: "31deb1ca-dafe-4134-bf0c-d8357153d8ed"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:41 crc kubenswrapper[4875]: I1007 08:15:41.978862 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc" (OuterVolumeSpecName: "kube-api-access-ztndc") pod "31deb1ca-dafe-4134-bf0c-d8357153d8ed" (UID: "31deb1ca-dafe-4134-bf0c-d8357153d8ed"). InnerVolumeSpecName "kube-api-access-ztndc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.006252 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data" (OuterVolumeSpecName: "config-data") pod "31deb1ca-dafe-4134-bf0c-d8357153d8ed" (UID: "31deb1ca-dafe-4134-bf0c-d8357153d8ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.016083 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31deb1ca-dafe-4134-bf0c-d8357153d8ed" (UID: "31deb1ca-dafe-4134-bf0c-d8357153d8ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.076316 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.076363 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztndc\" (UniqueName: \"kubernetes.io/projected/31deb1ca-dafe-4134-bf0c-d8357153d8ed-kube-api-access-ztndc\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.076378 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.076387 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31deb1ca-dafe-4134-bf0c-d8357153d8ed-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.602861 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-6k89r" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.606910 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vdpxw" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.606914 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vdpxw" event={"ID":"31deb1ca-dafe-4134-bf0c-d8357153d8ed","Type":"ContainerDied","Data":"42c5ec0b86f7788c33739596c0af0ed1c71ff0e88068581eadf03391c73075a6"} Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.607175 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42c5ec0b86f7788c33739596c0af0ed1c71ff0e88068581eadf03391c73075a6" Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.666112 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.671428 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-6k89r"] Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.786389 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.786621 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-log" containerID="cri-o://845f5361463692e00f406a302ba10885ef0daf84aac41c5c3b75abfd79f11b5c" gracePeriod=30 Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.787055 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-api" containerID="cri-o://ef5c55196f486fcaf605c9a56b822a4250f48fd2dec2e0d1ca942077e9642376" gracePeriod=30 Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.826456 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.884846 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.885361 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-log" containerID="cri-o://a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" gracePeriod=30 Oct 07 08:15:42 crc kubenswrapper[4875]: I1007 08:15:42.886121 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-metadata" containerID="cri-o://38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" gracePeriod=30 Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.225829 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.308961 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data\") pod \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.309152 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle\") pod \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.309288 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bhd2\" (UniqueName: \"kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2\") pod \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.309423 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts\") pod \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\" (UID: \"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.319678 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts" (OuterVolumeSpecName: "scripts") pod "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" (UID: "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.329901 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2" (OuterVolumeSpecName: "kube-api-access-8bhd2") pod "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" (UID: "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b"). InnerVolumeSpecName "kube-api-access-8bhd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.364898 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data" (OuterVolumeSpecName: "config-data") pod "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" (UID: "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.374993 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" (UID: "bf652b8b-3c7e-4673-a2f4-4af434e1ce0b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.412150 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.412192 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bhd2\" (UniqueName: \"kubernetes.io/projected/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-kube-api-access-8bhd2\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.412207 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.412217 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.484123 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.513193 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs\") pod \"f2915cba-5b63-4c46-8736-57aa976e2a67\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.513273 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data\") pod \"f2915cba-5b63-4c46-8736-57aa976e2a67\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.513301 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs\") pod \"f2915cba-5b63-4c46-8736-57aa976e2a67\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.513531 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gqft\" (UniqueName: \"kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft\") pod \"f2915cba-5b63-4c46-8736-57aa976e2a67\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.513562 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle\") pod \"f2915cba-5b63-4c46-8736-57aa976e2a67\" (UID: \"f2915cba-5b63-4c46-8736-57aa976e2a67\") " Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.514167 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs" (OuterVolumeSpecName: "logs") pod "f2915cba-5b63-4c46-8736-57aa976e2a67" (UID: "f2915cba-5b63-4c46-8736-57aa976e2a67"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.520305 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft" (OuterVolumeSpecName: "kube-api-access-4gqft") pod "f2915cba-5b63-4c46-8736-57aa976e2a67" (UID: "f2915cba-5b63-4c46-8736-57aa976e2a67"). InnerVolumeSpecName "kube-api-access-4gqft". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.551173 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data" (OuterVolumeSpecName: "config-data") pod "f2915cba-5b63-4c46-8736-57aa976e2a67" (UID: "f2915cba-5b63-4c46-8736-57aa976e2a67"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.553396 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2915cba-5b63-4c46-8736-57aa976e2a67" (UID: "f2915cba-5b63-4c46-8736-57aa976e2a67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.583644 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f2915cba-5b63-4c46-8736-57aa976e2a67" (UID: "f2915cba-5b63-4c46-8736-57aa976e2a67"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.616079 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gqft\" (UniqueName: \"kubernetes.io/projected/f2915cba-5b63-4c46-8736-57aa976e2a67-kube-api-access-4gqft\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.616118 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.616129 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2915cba-5b63-4c46-8736-57aa976e2a67-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.616140 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.616156 4875 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2915cba-5b63-4c46-8736-57aa976e2a67-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.622378 4875 generic.go:334] "Generic (PLEG): container finished" podID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerID="845f5361463692e00f406a302ba10885ef0daf84aac41c5c3b75abfd79f11b5c" exitCode=143 Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.622480 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerDied","Data":"845f5361463692e00f406a302ba10885ef0daf84aac41c5c3b75abfd79f11b5c"} Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.627972 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cdldk" event={"ID":"bf652b8b-3c7e-4673-a2f4-4af434e1ce0b","Type":"ContainerDied","Data":"c62a6325a6e82b4789966d169c392aa910253907d10d3da27b83170ae0887dfc"} Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.628036 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cdldk" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.628066 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c62a6325a6e82b4789966d169c392aa910253907d10d3da27b83170ae0887dfc" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630385 4875 generic.go:334] "Generic (PLEG): container finished" podID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerID="38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" exitCode=0 Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630411 4875 generic.go:334] "Generic (PLEG): container finished" podID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerID="a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" exitCode=143 Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630522 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630582 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerName="nova-scheduler-scheduler" containerID="cri-o://063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" gracePeriod=30 Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630668 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerDied","Data":"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de"} Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630695 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerDied","Data":"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8"} Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630708 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f2915cba-5b63-4c46-8736-57aa976e2a67","Type":"ContainerDied","Data":"9b72d12adf8b585079f4975cc33de821f8d6de0b4103758dac9c5bfa5b7a40d4"} Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.630729 4875 scope.go:117] "RemoveContainer" containerID="38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.661391 4875 scope.go:117] "RemoveContainer" containerID="a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.733233 4875 scope.go:117] "RemoveContainer" containerID="38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.743464 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" path="/var/lib/kubelet/pods/b243a457-ccd1-4c3d-b798-9924c6dfcf45/volumes" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.753949 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de\": container with ID starting with 38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de not found: ID does not exist" containerID="38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.754089 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de"} err="failed to get container status \"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de\": rpc error: code = NotFound desc = could not find container \"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de\": container with ID starting with 38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de not found: ID does not exist" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.754128 4875 scope.go:117] "RemoveContainer" containerID="a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.755727 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8\": container with ID starting with a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8 not found: ID does not exist" containerID="a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.755753 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8"} err="failed to get container status \"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8\": rpc error: code = NotFound desc = could not find container \"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8\": container with ID starting with a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8 not found: ID does not exist" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.755774 4875 scope.go:117] "RemoveContainer" containerID="38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.756040 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de"} err="failed to get container status \"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de\": rpc error: code = NotFound desc = could not find container \"38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de\": container with ID starting with 38ca0e03d6dcdd428ac865b51b16d71369cf38674dfd67e661bc4965d5f430de not found: ID does not exist" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.756064 4875 scope.go:117] "RemoveContainer" containerID="a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.756713 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8"} err="failed to get container status \"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8\": rpc error: code = NotFound desc = could not find container \"a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8\": container with ID starting with a4ef6fabaf7b1fc0942db58c48cedba8ebb93c593f37e5473bc65901b25ed0f8 not found: ID does not exist" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.781425 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.790768 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.810025 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.810798 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-log" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.810824 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-log" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.810839 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-metadata" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.810847 4875 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-metadata" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.810863 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" containerName="nova-cell1-conductor-db-sync" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.810870 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" containerName="nova-cell1-conductor-db-sync" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.810901 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31deb1ca-dafe-4134-bf0c-d8357153d8ed" containerName="nova-manage" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811038 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="31deb1ca-dafe-4134-bf0c-d8357153d8ed" containerName="nova-manage" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.811062 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="init" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811068 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="init" Oct 07 08:15:43 crc kubenswrapper[4875]: E1007 08:15:43.811114 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="dnsmasq-dns" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811123 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="dnsmasq-dns" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811395 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-metadata" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811420 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="31deb1ca-dafe-4134-bf0c-d8357153d8ed" containerName="nova-manage" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811431 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="b243a457-ccd1-4c3d-b798-9924c6dfcf45" containerName="dnsmasq-dns" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811446 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" containerName="nova-cell1-conductor-db-sync" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.811460 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" containerName="nova-metadata-log" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.813354 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.815933 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820514 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4grl\" (UniqueName: \"kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820613 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820668 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820706 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820730 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.820925 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.822698 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.836367 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.837967 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.847545 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.856066 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923500 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw5lm\" (UniqueName: \"kubernetes.io/projected/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-kube-api-access-pw5lm\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923589 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923681 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923720 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923743 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923772 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923802 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.923825 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4grl\" (UniqueName: \"kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.924784 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.928392 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.928925 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.935664 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:43 crc kubenswrapper[4875]: I1007 08:15:43.942762 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4grl\" (UniqueName: \"kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl\") pod \"nova-metadata-0\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " pod="openstack/nova-metadata-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.025537 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw5lm\" (UniqueName: \"kubernetes.io/projected/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-kube-api-access-pw5lm\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.025665 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.026132 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.030466 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.030470 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc 
kubenswrapper[4875]: I1007 08:15:44.042819 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw5lm\" (UniqueName: \"kubernetes.io/projected/eb928cc7-3310-4d3e-929c-d470f8a8f8bb-kube-api-access-pw5lm\") pod \"nova-cell1-conductor-0\" (UID: \"eb928cc7-3310-4d3e-929c-d470f8a8f8bb\") " pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.161583 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.173838 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.677623 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:15:44 crc kubenswrapper[4875]: W1007 08:15:44.680952 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc56873e1_d427_407d_99f5_bde715855984.slice/crio-74cfb6ce7f7d092a81c955def03700cf63dcf3c0a51868b845d5ae3b9137843d WatchSource:0}: Error finding container 74cfb6ce7f7d092a81c955def03700cf63dcf3c0a51868b845d5ae3b9137843d: Status 404 returned error can't find the container with id 74cfb6ce7f7d092a81c955def03700cf63dcf3c0a51868b845d5ae3b9137843d Oct 07 08:15:44 crc kubenswrapper[4875]: I1007 08:15:44.721200 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.655205 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"eb928cc7-3310-4d3e-929c-d470f8a8f8bb","Type":"ContainerStarted","Data":"0b99e08fef50b58064208a2a2455cf0b28bb75df8dbc7ec512f2dc9bf5699361"} Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.656017 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.656033 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"eb928cc7-3310-4d3e-929c-d470f8a8f8bb","Type":"ContainerStarted","Data":"a9a9ab8ed2eb4470a44a6a3bdea2d5a921d33452498f3b6e1fee7027cf4d5080"} Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.657127 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerStarted","Data":"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728"} Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.657152 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerStarted","Data":"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0"} Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.657162 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerStarted","Data":"74cfb6ce7f7d092a81c955def03700cf63dcf3c0a51868b845d5ae3b9137843d"} Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.680956 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.680938163 podStartE2EDuration="2.680938163s" podCreationTimestamp="2025-10-07 08:15:43 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:45.674602721 +0000 UTC m=+1170.634373274" watchObservedRunningTime="2025-10-07 08:15:45.680938163 +0000 UTC m=+1170.640708706" Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.698899 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.698863317 podStartE2EDuration="2.698863317s" podCreationTimestamp="2025-10-07 08:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:45.690169129 +0000 UTC m=+1170.649939672" watchObservedRunningTime="2025-10-07 08:15:45.698863317 +0000 UTC m=+1170.658633860" Oct 07 08:15:45 crc kubenswrapper[4875]: I1007 08:15:45.708235 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2915cba-5b63-4c46-8736-57aa976e2a67" path="/var/lib/kubelet/pods/f2915cba-5b63-4c46-8736-57aa976e2a67/volumes" Oct 07 08:15:45 crc kubenswrapper[4875]: E1007 08:15:45.859373 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:15:45 crc kubenswrapper[4875]: E1007 08:15:45.864120 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:15:45 crc kubenswrapper[4875]: E1007 08:15:45.869930 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:15:45 crc kubenswrapper[4875]: E1007 08:15:45.870000 4875 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerName="nova-scheduler-scheduler" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.664759 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.152:9292/healthcheck\": dial tcp 10.217.0.152:9292: i/o timeout" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.670212 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="626af307-8e17-427d-a18f-70ad5e8cc62f" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.152:9292/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.686928 4875 generic.go:334] "Generic (PLEG): container finished" 
podID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerID="ef5c55196f486fcaf605c9a56b822a4250f48fd2dec2e0d1ca942077e9642376" exitCode=0 Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.686990 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerDied","Data":"ef5c55196f486fcaf605c9a56b822a4250f48fd2dec2e0d1ca942077e9642376"} Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.687030 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ea3e6a51-61d8-4931-9a0b-6b49839a6f30","Type":"ContainerDied","Data":"622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc"} Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.687045 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="622032ceb4539f5f395043790744d203c69e7266af2d20955e51bb866c1a46bc" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.716806 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.809471 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.907104 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data\") pod \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.907511 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs\") pod \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.907704 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle\") pod \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.907761 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zsr9\" (UniqueName: \"kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9\") pod \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\" (UID: \"ea3e6a51-61d8-4931-9a0b-6b49839a6f30\") " Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.908354 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs" (OuterVolumeSpecName: "logs") pod "ea3e6a51-61d8-4931-9a0b-6b49839a6f30" (UID: "ea3e6a51-61d8-4931-9a0b-6b49839a6f30"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.913025 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9" (OuterVolumeSpecName: "kube-api-access-6zsr9") pod "ea3e6a51-61d8-4931-9a0b-6b49839a6f30" (UID: "ea3e6a51-61d8-4931-9a0b-6b49839a6f30"). InnerVolumeSpecName "kube-api-access-6zsr9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.940025 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data" (OuterVolumeSpecName: "config-data") pod "ea3e6a51-61d8-4931-9a0b-6b49839a6f30" (UID: "ea3e6a51-61d8-4931-9a0b-6b49839a6f30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:47 crc kubenswrapper[4875]: I1007 08:15:47.955836 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea3e6a51-61d8-4931-9a0b-6b49839a6f30" (UID: "ea3e6a51-61d8-4931-9a0b-6b49839a6f30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.010263 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.010311 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zsr9\" (UniqueName: \"kubernetes.io/projected/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-kube-api-access-6zsr9\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.010327 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.010338 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea3e6a51-61d8-4931-9a0b-6b49839a6f30-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.568932 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.723846 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-678jm\" (UniqueName: \"kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm\") pod \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.724345 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data\") pod \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.724575 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle\") pod \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\" (UID: \"2184acaa-b7d5-43ac-ae72-aafb225f40fe\") " Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.760671 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm" (OuterVolumeSpecName: "kube-api-access-678jm") pod "2184acaa-b7d5-43ac-ae72-aafb225f40fe" (UID: "2184acaa-b7d5-43ac-ae72-aafb225f40fe"). InnerVolumeSpecName "kube-api-access-678jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.775364 4875 generic.go:334] "Generic (PLEG): container finished" podID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" exitCode=0 Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.775504 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.782100 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2184acaa-b7d5-43ac-ae72-aafb225f40fe","Type":"ContainerDied","Data":"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9"} Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.782185 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2184acaa-b7d5-43ac-ae72-aafb225f40fe","Type":"ContainerDied","Data":"f49b98a849256fdc212a874a33a6ebee78cb3cbfdcb120cf336befcde29b2c3b"} Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.782216 4875 scope.go:117] "RemoveContainer" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.782532 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.833550 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-678jm\" (UniqueName: \"kubernetes.io/projected/2184acaa-b7d5-43ac-ae72-aafb225f40fe-kube-api-access-678jm\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.837175 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data" (OuterVolumeSpecName: "config-data") pod "2184acaa-b7d5-43ac-ae72-aafb225f40fe" (UID: "2184acaa-b7d5-43ac-ae72-aafb225f40fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.839036 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2184acaa-b7d5-43ac-ae72-aafb225f40fe" (UID: "2184acaa-b7d5-43ac-ae72-aafb225f40fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.888362 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.911602 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.933779 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:48 crc kubenswrapper[4875]: E1007 08:15:48.935446 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerName="nova-scheduler-scheduler" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935478 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerName="nova-scheduler-scheduler" Oct 07 08:15:48 crc kubenswrapper[4875]: E1007 08:15:48.935533 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-api" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935542 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-api" Oct 07 08:15:48 crc kubenswrapper[4875]: E1007 08:15:48.935606 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-log" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935617 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-log" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935926 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" containerName="nova-scheduler-scheduler" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935954 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-log" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.935996 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" containerName="nova-api-api" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.937514 4875 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.940066 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.940092 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2184acaa-b7d5-43ac-ae72-aafb225f40fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.940568 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.944378 4875 scope.go:117] "RemoveContainer" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" Oct 07 08:15:48 crc kubenswrapper[4875]: E1007 08:15:48.945086 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9\": container with ID starting with 063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9 not found: ID does not exist" containerID="063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.945152 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9"} err="failed to get container status \"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9\": rpc error: code = NotFound desc = could not find container \"063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9\": container with ID starting with 063882deb3100b6172c24e106eab4590eaae32739f6d6c4b08e07b63af27b6b9 not found: ID does not exist" Oct 07 08:15:48 crc kubenswrapper[4875]: I1007 08:15:48.954735 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.043061 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.043276 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnz88\" (UniqueName: \"kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.043327 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.043373 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle\") pod \"nova-api-0\" 
(UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.123770 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.135806 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.145776 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnz88\" (UniqueName: \"kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.145903 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.146961 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.147100 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.147776 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.157172 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.159678 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.159792 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.161769 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.161781 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.161857 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.166480 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.184637 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnz88\" (UniqueName: \"kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88\") pod \"nova-api-0\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.196857 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.231679 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.263790 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.353242 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.353382 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.353421 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfzjf\" (UniqueName: \"kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.459593 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.459675 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.459708 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfzjf\" (UniqueName: 
\"kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.471900 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.483019 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfzjf\" (UniqueName: \"kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.484500 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data\") pod \"nova-scheduler-0\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.498823 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.722030 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2184acaa-b7d5-43ac-ae72-aafb225f40fe" path="/var/lib/kubelet/pods/2184acaa-b7d5-43ac-ae72-aafb225f40fe/volumes" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.723192 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea3e6a51-61d8-4931-9a0b-6b49839a6f30" path="/var/lib/kubelet/pods/ea3e6a51-61d8-4931-9a0b-6b49839a6f30/volumes" Oct 07 08:15:49 crc kubenswrapper[4875]: I1007 08:15:49.822566 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:15:49 crc kubenswrapper[4875]: W1007 08:15:49.828035 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6202e6ec_74a4_4e11_a274_51bb9ca4a211.slice/crio-1afae2dd306fbc1caa4723e687efa764e6551aca2cedce33e9b8acc7ae9b1cee WatchSource:0}: Error finding container 1afae2dd306fbc1caa4723e687efa764e6551aca2cedce33e9b8acc7ae9b1cee: Status 404 returned error can't find the container with id 1afae2dd306fbc1caa4723e687efa764e6551aca2cedce33e9b8acc7ae9b1cee Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.041218 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.820108 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerStarted","Data":"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894"} Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.821336 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerStarted","Data":"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b"} Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.821371 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerStarted","Data":"1afae2dd306fbc1caa4723e687efa764e6551aca2cedce33e9b8acc7ae9b1cee"} Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.839385 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93","Type":"ContainerStarted","Data":"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8"} Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.839464 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93","Type":"ContainerStarted","Data":"de841348632b3c11537aeb5ca9ba34346573998e9e5428fc4542bb3f08f8d391"} Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.841922 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.841897345 podStartE2EDuration="2.841897345s" podCreationTimestamp="2025-10-07 08:15:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:50.839422726 +0000 UTC m=+1175.799193279" watchObservedRunningTime="2025-10-07 08:15:50.841897345 +0000 UTC m=+1175.801667888" Oct 07 08:15:50 crc kubenswrapper[4875]: I1007 08:15:50.864148 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.8641220459999999 podStartE2EDuration="1.864122046s" podCreationTimestamp="2025-10-07 08:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:15:50.856623256 +0000 UTC m=+1175.816393809" watchObservedRunningTime="2025-10-07 08:15:50.864122046 +0000 UTC m=+1175.823892589" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.016748 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.018388 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="c187d3cc-62d2-43b8-9616-03670de368da" containerName="kube-state-metrics" containerID="cri-o://1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63" gracePeriod=30 Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.563730 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.575276 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkmd4\" (UniqueName: \"kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4\") pod \"c187d3cc-62d2-43b8-9616-03670de368da\" (UID: \"c187d3cc-62d2-43b8-9616-03670de368da\") " Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.582441 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4" (OuterVolumeSpecName: "kube-api-access-nkmd4") pod "c187d3cc-62d2-43b8-9616-03670de368da" (UID: "c187d3cc-62d2-43b8-9616-03670de368da"). InnerVolumeSpecName "kube-api-access-nkmd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.677947 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkmd4\" (UniqueName: \"kubernetes.io/projected/c187d3cc-62d2-43b8-9616-03670de368da-kube-api-access-nkmd4\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.874534 4875 generic.go:334] "Generic (PLEG): container finished" podID="c187d3cc-62d2-43b8-9616-03670de368da" containerID="1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63" exitCode=2 Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.874593 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.874593 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c187d3cc-62d2-43b8-9616-03670de368da","Type":"ContainerDied","Data":"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63"} Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.874771 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c187d3cc-62d2-43b8-9616-03670de368da","Type":"ContainerDied","Data":"8d64567000b1e09fec2ee484018c2ec91e8a899acc0a59ab09e9594e3bea66b8"} Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.874809 4875 scope.go:117] "RemoveContainer" containerID="1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.901298 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.911965 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.915982 4875 scope.go:117] "RemoveContainer" containerID="1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63" Oct 07 08:15:53 crc kubenswrapper[4875]: E1007 08:15:53.916705 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63\": container with ID starting with 1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63 not found: ID does not exist" containerID="1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.916762 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63"} err="failed to get container status \"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63\": rpc error: code = NotFound desc = could not find container \"1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63\": container with ID starting with 1ec433a903908114f0a6a965638436f4bb59bcd23e2691e9bbf1111cbceaef63 not found: ID does not exist" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.919922 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:53 crc kubenswrapper[4875]: E1007 08:15:53.920362 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c187d3cc-62d2-43b8-9616-03670de368da" containerName="kube-state-metrics" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.920384 4875 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c187d3cc-62d2-43b8-9616-03670de368da" containerName="kube-state-metrics" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.920613 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c187d3cc-62d2-43b8-9616-03670de368da" containerName="kube-state-metrics" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.921871 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.924148 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.924602 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 07 08:15:53 crc kubenswrapper[4875]: I1007 08:15:53.931387 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.091936 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.092059 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfzvz\" (UniqueName: \"kubernetes.io/projected/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-api-access-mfzvz\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.092103 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.092149 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.162481 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.162533 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.194366 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.194857 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfzvz\" (UniqueName: 
\"kubernetes.io/projected/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-api-access-mfzvz\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.194911 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.194950 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.200193 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.201746 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.216020 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.223164 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfzvz\" (UniqueName: \"kubernetes.io/projected/cd2084f6-1596-45c2-a4ba-1cbd7a1ca565-kube-api-access-mfzvz\") pod \"kube-state-metrics-0\" (UID: \"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565\") " pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.250839 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.499644 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.801709 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 08:15:54 crc kubenswrapper[4875]: I1007 08:15:54.946047 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565","Type":"ContainerStarted","Data":"0736f42ce9538b22cd85cd0a81376dc278b92cd43a979219b879a0eebf77f329"} Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.180107 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.180154 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.369110 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.369774 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-central-agent" containerID="cri-o://51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b" gracePeriod=30 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.370103 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="proxy-httpd" containerID="cri-o://47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf" gracePeriod=30 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.370313 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-notification-agent" containerID="cri-o://71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593" gracePeriod=30 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.370358 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="sg-core" containerID="cri-o://f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97" gracePeriod=30 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.707351 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c187d3cc-62d2-43b8-9616-03670de368da" path="/var/lib/kubelet/pods/c187d3cc-62d2-43b8-9616-03670de368da/volumes" Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.960041 4875 generic.go:334] "Generic (PLEG): container finished" podID="05e99478-4905-4598-9258-d4ffe06d1d97" containerID="47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf" exitCode=0 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 
08:15:55.960089 4875 generic.go:334] "Generic (PLEG): container finished" podID="05e99478-4905-4598-9258-d4ffe06d1d97" containerID="f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97" exitCode=2 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.960102 4875 generic.go:334] "Generic (PLEG): container finished" podID="05e99478-4905-4598-9258-d4ffe06d1d97" containerID="51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b" exitCode=0 Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.960113 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerDied","Data":"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf"} Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.960183 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerDied","Data":"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97"} Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.960195 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerDied","Data":"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b"} Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.962488 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cd2084f6-1596-45c2-a4ba-1cbd7a1ca565","Type":"ContainerStarted","Data":"87500f72e8234b1689f3b38e429c0058072bfd6ab7cf5a7d8075e80d83f1a12c"} Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.963786 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 08:15:55 crc kubenswrapper[4875]: I1007 08:15:55.990153 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.606207034 podStartE2EDuration="2.990134509s" podCreationTimestamp="2025-10-07 08:15:53 +0000 UTC" firstStartedPulling="2025-10-07 08:15:54.830818901 +0000 UTC m=+1179.790589444" lastFinishedPulling="2025-10-07 08:15:55.214746376 +0000 UTC m=+1180.174516919" observedRunningTime="2025-10-07 08:15:55.981757151 +0000 UTC m=+1180.941527704" watchObservedRunningTime="2025-10-07 08:15:55.990134509 +0000 UTC m=+1180.949905042" Oct 07 08:15:56 crc kubenswrapper[4875]: E1007 08:15:56.155181 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e99478_4905_4598_9258_d4ffe06d1d97.slice/crio-conmon-71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e99478_4905_4598_9258_d4ffe06d1d97.slice/crio-71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593.scope\": RecentStats: unable to find data in memory cache]" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.480359 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566132 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bfp7\" (UniqueName: \"kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566223 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566362 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566462 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566502 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566646 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.566685 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle\") pod \"05e99478-4905-4598-9258-d4ffe06d1d97\" (UID: \"05e99478-4905-4598-9258-d4ffe06d1d97\") " Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.567572 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.567785 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.568598 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.568620 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05e99478-4905-4598-9258-d4ffe06d1d97-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.578248 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7" (OuterVolumeSpecName: "kube-api-access-7bfp7") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "kube-api-access-7bfp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.585818 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts" (OuterVolumeSpecName: "scripts") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.632177 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.671161 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.671209 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.671224 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bfp7\" (UniqueName: \"kubernetes.io/projected/05e99478-4905-4598-9258-d4ffe06d1d97-kube-api-access-7bfp7\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.715603 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.747711 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data" (OuterVolumeSpecName: "config-data") pod "05e99478-4905-4598-9258-d4ffe06d1d97" (UID: "05e99478-4905-4598-9258-d4ffe06d1d97"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.772186 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.772230 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e99478-4905-4598-9258-d4ffe06d1d97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.975413 4875 generic.go:334] "Generic (PLEG): container finished" podID="05e99478-4905-4598-9258-d4ffe06d1d97" containerID="71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593" exitCode=0 Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.976164 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.976407 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerDied","Data":"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593"} Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.977439 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05e99478-4905-4598-9258-d4ffe06d1d97","Type":"ContainerDied","Data":"b4d57755d1af2f71a426321ea033cd2ab8cb80db1d1c86fcecdd5e1a3394c9e9"} Oct 07 08:15:56 crc kubenswrapper[4875]: I1007 08:15:56.977460 4875 scope.go:117] "RemoveContainer" containerID="47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.015056 4875 scope.go:117] "RemoveContainer" containerID="f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.047198 4875 scope.go:117] "RemoveContainer" containerID="71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.047444 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.073271 4875 scope.go:117] "RemoveContainer" containerID="51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.081214 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.101563 4875 scope.go:117] "RemoveContainer" containerID="47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.103117 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf\": container with ID starting with 47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf not found: ID does not exist" containerID="47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.103164 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf"} err="failed to get container status 
\"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf\": rpc error: code = NotFound desc = could not find container \"47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf\": container with ID starting with 47c7534704be3bf462966dbb27aa44236cc60bc13604d93bbed9e764421a00cf not found: ID does not exist" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.103198 4875 scope.go:117] "RemoveContainer" containerID="f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.103505 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.103630 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97\": container with ID starting with f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97 not found: ID does not exist" containerID="f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.103719 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97"} err="failed to get container status \"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97\": rpc error: code = NotFound desc = could not find container \"f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97\": container with ID starting with f8d4dc71c72356b8c39cb49745e8454a478738cc8613a13f6c17df886392dc97 not found: ID does not exist" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.103755 4875 scope.go:117] "RemoveContainer" containerID="71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.104102 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="proxy-httpd" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104123 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="proxy-httpd" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.104143 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="sg-core" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104151 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="sg-core" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.104165 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-central-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104171 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-central-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.104194 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-notification-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104201 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-notification-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 
08:15:57.104414 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-central-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104433 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="proxy-httpd" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104442 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="ceilometer-notification-agent" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104454 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" containerName="sg-core" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.104857 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593\": container with ID starting with 71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593 not found: ID does not exist" containerID="71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104896 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593"} err="failed to get container status \"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593\": rpc error: code = NotFound desc = could not find container \"71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593\": container with ID starting with 71d27a07deb2804cee3c169a57830f02a9701ff7c0db035d7ca15683bbb8d593 not found: ID does not exist" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.104912 4875 scope.go:117] "RemoveContainer" containerID="51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.106574 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: E1007 08:15:57.109312 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b\": container with ID starting with 51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b not found: ID does not exist" containerID="51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.109349 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b"} err="failed to get container status \"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b\": rpc error: code = NotFound desc = could not find container \"51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b\": container with ID starting with 51e93f6a01db1247b670b9f637c5acb50c5fbe58d30f27bf5db4a07c39494e8b not found: ID does not exist" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.116860 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.116917 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.117294 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.118287 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185169 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185261 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185291 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185357 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185385 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd\") pod \"ceilometer-0\" (UID: 
\"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185420 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185452 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtcq4\" (UniqueName: \"kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.185486 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.292998 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293079 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293145 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293224 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293258 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293304 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtcq4\" (UniqueName: 
\"kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.293367 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.295227 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.296163 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.300787 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.300790 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.302162 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.303199 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.303392 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.316382 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtcq4\" (UniqueName: \"kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4\") pod \"ceilometer-0\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.439498 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.718514 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05e99478-4905-4598-9258-d4ffe06d1d97" path="/var/lib/kubelet/pods/05e99478-4905-4598-9258-d4ffe06d1d97/volumes" Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.925746 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:15:57 crc kubenswrapper[4875]: W1007 08:15:57.927848 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1f6457f_ba47_4cd9_a972_0efb66c6514f.slice/crio-d32f74db835329fbfca2b3301f9fa4bf5c98cb0d523f1e7c6e01d58800ae2383 WatchSource:0}: Error finding container d32f74db835329fbfca2b3301f9fa4bf5c98cb0d523f1e7c6e01d58800ae2383: Status 404 returned error can't find the container with id d32f74db835329fbfca2b3301f9fa4bf5c98cb0d523f1e7c6e01d58800ae2383 Oct 07 08:15:57 crc kubenswrapper[4875]: I1007 08:15:57.992429 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerStarted","Data":"d32f74db835329fbfca2b3301f9fa4bf5c98cb0d523f1e7c6e01d58800ae2383"} Oct 07 08:15:59 crc kubenswrapper[4875]: I1007 08:15:59.004296 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerStarted","Data":"3e97a8753274753a3934d5c8dcc4545f97342e2b371bc8fca9acfe1e100e7e9c"} Oct 07 08:15:59 crc kubenswrapper[4875]: I1007 08:15:59.264738 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:15:59 crc kubenswrapper[4875]: I1007 08:15:59.265269 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:15:59 crc kubenswrapper[4875]: I1007 08:15:59.500675 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 08:15:59 crc kubenswrapper[4875]: I1007 08:15:59.534942 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 08:16:00 crc kubenswrapper[4875]: I1007 08:16:00.020707 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerStarted","Data":"e6d3906e53422aafd7693a516fd1a217547037d07f728611484f8ed94ca062ee"} Oct 07 08:16:00 crc kubenswrapper[4875]: I1007 08:16:00.070551 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 08:16:00 crc kubenswrapper[4875]: I1007 08:16:00.348179 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:00 crc kubenswrapper[4875]: I1007 08:16:00.348193 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:01 crc kubenswrapper[4875]: I1007 08:16:01.065710 4875 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerStarted","Data":"b853f11f8111eabc5f96b724fcb4812bc69ace234291a62023d09b8acb8c29af"} Oct 07 08:16:02 crc kubenswrapper[4875]: I1007 08:16:02.076074 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerStarted","Data":"de8a95992c17127bbc5516d3d84e198bfd5b80424143aca3ca64fd68638aac70"} Oct 07 08:16:02 crc kubenswrapper[4875]: I1007 08:16:02.076582 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:16:02 crc kubenswrapper[4875]: I1007 08:16:02.105393 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.86609337 podStartE2EDuration="5.105375728s" podCreationTimestamp="2025-10-07 08:15:57 +0000 UTC" firstStartedPulling="2025-10-07 08:15:57.930933325 +0000 UTC m=+1182.890703868" lastFinishedPulling="2025-10-07 08:16:01.170215683 +0000 UTC m=+1186.129986226" observedRunningTime="2025-10-07 08:16:02.098483677 +0000 UTC m=+1187.058254230" watchObservedRunningTime="2025-10-07 08:16:02.105375728 +0000 UTC m=+1187.065146271" Oct 07 08:16:04 crc kubenswrapper[4875]: I1007 08:16:04.167925 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 08:16:04 crc kubenswrapper[4875]: I1007 08:16:04.173655 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 08:16:04 crc kubenswrapper[4875]: I1007 08:16:04.182174 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 08:16:04 crc kubenswrapper[4875]: I1007 08:16:04.270190 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 08:16:05 crc kubenswrapper[4875]: I1007 08:16:05.110234 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 08:16:07 crc kubenswrapper[4875]: I1007 08:16:07.956511 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.058472 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlp4r\" (UniqueName: \"kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r\") pod \"68a6ccf0-417c-4432-80b8-a0a66b545a88\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.058724 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle\") pod \"68a6ccf0-417c-4432-80b8-a0a66b545a88\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.059020 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data\") pod \"68a6ccf0-417c-4432-80b8-a0a66b545a88\" (UID: \"68a6ccf0-417c-4432-80b8-a0a66b545a88\") " Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.071246 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r" (OuterVolumeSpecName: "kube-api-access-hlp4r") pod "68a6ccf0-417c-4432-80b8-a0a66b545a88" (UID: "68a6ccf0-417c-4432-80b8-a0a66b545a88"). InnerVolumeSpecName "kube-api-access-hlp4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.104170 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data" (OuterVolumeSpecName: "config-data") pod "68a6ccf0-417c-4432-80b8-a0a66b545a88" (UID: "68a6ccf0-417c-4432-80b8-a0a66b545a88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.109221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68a6ccf0-417c-4432-80b8-a0a66b545a88" (UID: "68a6ccf0-417c-4432-80b8-a0a66b545a88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.136924 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"68a6ccf0-417c-4432-80b8-a0a66b545a88","Type":"ContainerDied","Data":"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc"} Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.137004 4875 scope.go:117] "RemoveContainer" containerID="5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.137246 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.136838 4875 generic.go:334] "Generic (PLEG): container finished" podID="68a6ccf0-417c-4432-80b8-a0a66b545a88" containerID="5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc" exitCode=137 Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.137434 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"68a6ccf0-417c-4432-80b8-a0a66b545a88","Type":"ContainerDied","Data":"798bebf80660c92520ccbad4a4ae40c1e96cf110d5ef3f978c241601883bc0a6"} Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.160390 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.160432 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a6ccf0-417c-4432-80b8-a0a66b545a88-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.160445 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlp4r\" (UniqueName: \"kubernetes.io/projected/68a6ccf0-417c-4432-80b8-a0a66b545a88-kube-api-access-hlp4r\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.212640 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.216038 4875 scope.go:117] "RemoveContainer" containerID="5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc" Oct 07 08:16:08 crc kubenswrapper[4875]: E1007 08:16:08.216728 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc\": container with ID starting with 5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc not found: ID does not exist" containerID="5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.216760 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc"} err="failed to get container status \"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc\": rpc error: code = NotFound desc = could not find container \"5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc\": container with ID starting with 5783bd79e8a92f64fcb51ace08d0bfd48321d2bfe5be82df768d5f656c6a90fc not found: ID does not exist" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.224084 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.245022 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:16:08 crc kubenswrapper[4875]: E1007 08:16:08.249103 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68a6ccf0-417c-4432-80b8-a0a66b545a88" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.249143 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="68a6ccf0-417c-4432-80b8-a0a66b545a88" 
containerName="nova-cell1-novncproxy-novncproxy" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.250115 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="68a6ccf0-417c-4432-80b8-a0a66b545a88" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.251824 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.255520 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.255676 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.255872 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.265469 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.265530 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cthfl\" (UniqueName: \"kubernetes.io/projected/e35464b6-e9ab-4262-a99d-efad04dbd0e0-kube-api-access-cthfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.266330 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.266415 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.266557 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.290253 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.369452 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" 
Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.369576 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.369619 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cthfl\" (UniqueName: \"kubernetes.io/projected/e35464b6-e9ab-4262-a99d-efad04dbd0e0-kube-api-access-cthfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.369769 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.369804 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.375675 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.375745 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.375963 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.375869 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e35464b6-e9ab-4262-a99d-efad04dbd0e0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.427469 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cthfl\" (UniqueName: \"kubernetes.io/projected/e35464b6-e9ab-4262-a99d-efad04dbd0e0-kube-api-access-cthfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"e35464b6-e9ab-4262-a99d-efad04dbd0e0\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:08 crc kubenswrapper[4875]: I1007 08:16:08.580553 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.114934 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 08:16:09 crc kubenswrapper[4875]: W1007 08:16:09.116771 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode35464b6_e9ab_4262_a99d_efad04dbd0e0.slice/crio-a0a69aee9d617e2d3da4fad8274266435aef83d5ac1e454f8baa3bde739b235a WatchSource:0}: Error finding container a0a69aee9d617e2d3da4fad8274266435aef83d5ac1e454f8baa3bde739b235a: Status 404 returned error can't find the container with id a0a69aee9d617e2d3da4fad8274266435aef83d5ac1e454f8baa3bde739b235a Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.152610 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e35464b6-e9ab-4262-a99d-efad04dbd0e0","Type":"ContainerStarted","Data":"a0a69aee9d617e2d3da4fad8274266435aef83d5ac1e454f8baa3bde739b235a"} Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.269564 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.270214 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.273226 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.275493 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 08:16:09 crc kubenswrapper[4875]: I1007 08:16:09.710275 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68a6ccf0-417c-4432-80b8-a0a66b545a88" path="/var/lib/kubelet/pods/68a6ccf0-417c-4432-80b8-a0a66b545a88/volumes" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.171714 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e35464b6-e9ab-4262-a99d-efad04dbd0e0","Type":"ContainerStarted","Data":"70926beec67ee7d36804ea900ea021736ad76d3abc74de4a734371f39d7601e6"} Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.171801 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.178116 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.214007 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.213971463 podStartE2EDuration="2.213971463s" podCreationTimestamp="2025-10-07 08:16:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:10.19729389 +0000 UTC m=+1195.157064433" watchObservedRunningTime="2025-10-07 08:16:10.213971463 +0000 UTC m=+1195.173742046" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.427957 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.430199 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.451607 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530195 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530251 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530292 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2df8f\" (UniqueName: \"kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530324 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530367 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.530689 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634004 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634063 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634101 4875 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2df8f\" (UniqueName: \"kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634126 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634147 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.634211 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.635161 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.635702 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.636224 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.637177 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.637695 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.679961 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2df8f\" (UniqueName: 
\"kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f\") pod \"dnsmasq-dns-cd5cbd7b9-bpsf7\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:10 crc kubenswrapper[4875]: I1007 08:16:10.774040 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:11 crc kubenswrapper[4875]: W1007 08:16:11.342346 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc21aba7_0a91_4583_9c30_7b5f8efcb165.slice/crio-369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e WatchSource:0}: Error finding container 369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e: Status 404 returned error can't find the container with id 369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e Oct 07 08:16:11 crc kubenswrapper[4875]: I1007 08:16:11.349383 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.190917 4875 generic.go:334] "Generic (PLEG): container finished" podID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerID="6ae498c80e5cad274e16b64a9b4db5680cd54a5995ac3d55d7d3f1fdd57a3b15" exitCode=0 Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.191079 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" event={"ID":"fc21aba7-0a91-4583-9c30-7b5f8efcb165","Type":"ContainerDied","Data":"6ae498c80e5cad274e16b64a9b4db5680cd54a5995ac3d55d7d3f1fdd57a3b15"} Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.192119 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" event={"ID":"fc21aba7-0a91-4583-9c30-7b5f8efcb165","Type":"ContainerStarted","Data":"369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e"} Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.978434 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.979317 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-central-agent" containerID="cri-o://3e97a8753274753a3934d5c8dcc4545f97342e2b371bc8fca9acfe1e100e7e9c" gracePeriod=30 Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.979466 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-notification-agent" containerID="cri-o://e6d3906e53422aafd7693a516fd1a217547037d07f728611484f8ed94ca062ee" gracePeriod=30 Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.979507 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="sg-core" containerID="cri-o://b853f11f8111eabc5f96b724fcb4812bc69ace234291a62023d09b8acb8c29af" gracePeriod=30 Oct 07 08:16:12 crc kubenswrapper[4875]: I1007 08:16:12.979770 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="proxy-httpd" containerID="cri-o://de8a95992c17127bbc5516d3d84e198bfd5b80424143aca3ca64fd68638aac70" gracePeriod=30 Oct 07 08:16:13 crc 
kubenswrapper[4875]: I1007 08:16:13.005509 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.011137 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.199:3000/\": EOF" Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.205919 4875 generic.go:334] "Generic (PLEG): container finished" podID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerID="b853f11f8111eabc5f96b724fcb4812bc69ace234291a62023d09b8acb8c29af" exitCode=2 Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.206001 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerDied","Data":"b853f11f8111eabc5f96b724fcb4812bc69ace234291a62023d09b8acb8c29af"} Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.226765 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" event={"ID":"fc21aba7-0a91-4583-9c30-7b5f8efcb165","Type":"ContainerStarted","Data":"25acfa43f917e4a4c187cae67cea596c56cfcd3e883b78c57c3f7215f897f29f"} Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.226951 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-log" containerID="cri-o://19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b" gracePeriod=30 Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.227079 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-api" containerID="cri-o://db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894" gracePeriod=30 Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.256948 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" podStartSLOduration=3.256929888 podStartE2EDuration="3.256929888s" podCreationTimestamp="2025-10-07 08:16:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:13.252539058 +0000 UTC m=+1198.212309591" watchObservedRunningTime="2025-10-07 08:16:13.256929888 +0000 UTC m=+1198.216700431" Oct 07 08:16:13 crc kubenswrapper[4875]: I1007 08:16:13.581727 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.240950 4875 generic.go:334] "Generic (PLEG): container finished" podID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerID="19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b" exitCode=143 Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.240992 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerDied","Data":"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b"} Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.244025 4875 generic.go:334] "Generic (PLEG): container finished" podID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerID="de8a95992c17127bbc5516d3d84e198bfd5b80424143aca3ca64fd68638aac70" exitCode=0 Oct 07 08:16:14 crc 
kubenswrapper[4875]: I1007 08:16:14.244065 4875 generic.go:334] "Generic (PLEG): container finished" podID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerID="3e97a8753274753a3934d5c8dcc4545f97342e2b371bc8fca9acfe1e100e7e9c" exitCode=0 Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.244349 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerDied","Data":"de8a95992c17127bbc5516d3d84e198bfd5b80424143aca3ca64fd68638aac70"} Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.244400 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerDied","Data":"3e97a8753274753a3934d5c8dcc4545f97342e2b371bc8fca9acfe1e100e7e9c"} Oct 07 08:16:14 crc kubenswrapper[4875]: I1007 08:16:14.244417 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.270102 4875 generic.go:334] "Generic (PLEG): container finished" podID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerID="e6d3906e53422aafd7693a516fd1a217547037d07f728611484f8ed94ca062ee" exitCode=0 Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.270177 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerDied","Data":"e6d3906e53422aafd7693a516fd1a217547037d07f728611484f8ed94ca062ee"} Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.351765 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.433579 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtcq4\" (UniqueName: \"kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.433790 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.433833 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.433919 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.433971 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 
08:16:16.434064 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.434164 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.434199 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts\") pod \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\" (UID: \"d1f6457f-ba47-4cd9-a972-0efb66c6514f\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.434714 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.435083 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.435276 4875 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.435301 4875 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1f6457f-ba47-4cd9-a972-0efb66c6514f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.458236 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts" (OuterVolumeSpecName: "scripts") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.460808 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4" (OuterVolumeSpecName: "kube-api-access-jtcq4") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "kube-api-access-jtcq4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.471324 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.499474 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.521429 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.537134 4875 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.537187 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.537204 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.537215 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtcq4\" (UniqueName: \"kubernetes.io/projected/d1f6457f-ba47-4cd9-a972-0efb66c6514f-kube-api-access-jtcq4\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.537223 4875 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.555227 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data" (OuterVolumeSpecName: "config-data") pod "d1f6457f-ba47-4cd9-a972-0efb66c6514f" (UID: "d1f6457f-ba47-4cd9-a972-0efb66c6514f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.639790 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f6457f-ba47-4cd9-a972-0efb66c6514f-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.754111 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.842999 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnz88\" (UniqueName: \"kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88\") pod \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.843274 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data\") pod \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.843430 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle\") pod \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.843467 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs\") pod \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\" (UID: \"6202e6ec-74a4-4e11-a274-51bb9ca4a211\") " Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.844401 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs" (OuterVolumeSpecName: "logs") pod "6202e6ec-74a4-4e11-a274-51bb9ca4a211" (UID: "6202e6ec-74a4-4e11-a274-51bb9ca4a211"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.862269 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88" (OuterVolumeSpecName: "kube-api-access-xnz88") pod "6202e6ec-74a4-4e11-a274-51bb9ca4a211" (UID: "6202e6ec-74a4-4e11-a274-51bb9ca4a211"). InnerVolumeSpecName "kube-api-access-xnz88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.885593 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data" (OuterVolumeSpecName: "config-data") pod "6202e6ec-74a4-4e11-a274-51bb9ca4a211" (UID: "6202e6ec-74a4-4e11-a274-51bb9ca4a211"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.887593 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6202e6ec-74a4-4e11-a274-51bb9ca4a211" (UID: "6202e6ec-74a4-4e11-a274-51bb9ca4a211"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.946239 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.946305 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6202e6ec-74a4-4e11-a274-51bb9ca4a211-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.946323 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6202e6ec-74a4-4e11-a274-51bb9ca4a211-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:16 crc kubenswrapper[4875]: I1007 08:16:16.946335 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnz88\" (UniqueName: \"kubernetes.io/projected/6202e6ec-74a4-4e11-a274-51bb9ca4a211-kube-api-access-xnz88\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.286266 4875 generic.go:334] "Generic (PLEG): container finished" podID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerID="db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894" exitCode=0 Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.286404 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.286388 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerDied","Data":"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894"} Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.287079 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6202e6ec-74a4-4e11-a274-51bb9ca4a211","Type":"ContainerDied","Data":"1afae2dd306fbc1caa4723e687efa764e6551aca2cedce33e9b8acc7ae9b1cee"} Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.287139 4875 scope.go:117] "RemoveContainer" containerID="db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.294800 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1f6457f-ba47-4cd9-a972-0efb66c6514f","Type":"ContainerDied","Data":"d32f74db835329fbfca2b3301f9fa4bf5c98cb0d523f1e7c6e01d58800ae2383"} Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.294928 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.340858 4875 scope.go:117] "RemoveContainer" containerID="19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.341063 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.367066 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.386319 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.417461 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.420367 4875 scope.go:117] "RemoveContainer" containerID="db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.422340 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894\": container with ID starting with db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894 not found: ID does not exist" containerID="db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.422421 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894"} err="failed to get container status \"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894\": rpc error: code = NotFound desc = could not find container \"db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894\": container with ID starting with db1c822e8f95fdfa6d08bf82ddd0dc5db04e35743f98a61a2badf088b1dd9894 not found: ID does not exist" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.422459 4875 scope.go:117] "RemoveContainer" containerID="19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.423513 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b\": container with ID starting with 19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b not found: ID does not exist" containerID="19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.423581 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b"} err="failed to get container status \"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b\": rpc error: code = NotFound desc = could not find container \"19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b\": container with ID starting with 19910f58cd42fe42a26d286bec82884d0f0f7e5bbd83e34d5da9b9f9f4d8166b not found: ID does not exist" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.423622 4875 scope.go:117] "RemoveContainer" containerID="de8a95992c17127bbc5516d3d84e198bfd5b80424143aca3ca64fd68638aac70" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.462676 4875 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463272 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-central-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463300 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-central-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463320 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="proxy-httpd" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463328 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="proxy-httpd" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463359 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="sg-core" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463367 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="sg-core" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463382 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-api" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463389 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-api" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463404 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-notification-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463410 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-notification-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: E1007 08:16:17.463422 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-log" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463427 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-log" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463659 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-notification-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463678 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-api" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463690 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="sg-core" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463713 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="ceilometer-central-agent" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463725 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" containerName="nova-api-log" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.463741 
4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" containerName="proxy-httpd" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.465106 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.476126 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.479076 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.479164 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.479623 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.486176 4875 scope.go:117] "RemoveContainer" containerID="b853f11f8111eabc5f96b724fcb4812bc69ace234291a62023d09b8acb8c29af" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.496025 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.496938 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.497077 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.500293 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.500561 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.501062 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.559761 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.559849 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.559920 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-config-data\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.559969 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqdhv\" (UniqueName: \"kubernetes.io/projected/453d5031-7d52-48b9-abd5-5c261297ee70-kube-api-access-mqdhv\") pod \"ceilometer-0\" (UID: 
\"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.559994 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-run-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560014 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-scripts\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560033 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560064 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-log-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560084 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560107 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560136 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560179 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560297 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkst2\" (UniqueName: \"kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.560326 4875 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.561215 4875 scope.go:117] "RemoveContainer" containerID="e6d3906e53422aafd7693a516fd1a217547037d07f728611484f8ed94ca062ee" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.618560 4875 scope.go:117] "RemoveContainer" containerID="3e97a8753274753a3934d5c8dcc4545f97342e2b371bc8fca9acfe1e100e7e9c" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662530 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662613 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-config-data\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662655 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqdhv\" (UniqueName: \"kubernetes.io/projected/453d5031-7d52-48b9-abd5-5c261297ee70-kube-api-access-mqdhv\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662685 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-run-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662704 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-scripts\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662729 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662758 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-log-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662778 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662803 4875 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662837 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662901 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662926 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkst2\" (UniqueName: \"kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662948 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.662973 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.664213 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.664302 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-log-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.664954 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/453d5031-7d52-48b9-abd5-5c261297ee70-run-httpd\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.670394 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.670964 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.671431 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-scripts\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.672712 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.673005 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.673147 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.674144 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.675158 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/453d5031-7d52-48b9-abd5-5c261297ee70-config-data\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.678346 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.685008 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkst2\" (UniqueName: \"kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2\") pod \"nova-api-0\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.686585 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqdhv\" (UniqueName: \"kubernetes.io/projected/453d5031-7d52-48b9-abd5-5c261297ee70-kube-api-access-mqdhv\") pod \"ceilometer-0\" (UID: \"453d5031-7d52-48b9-abd5-5c261297ee70\") " pod="openstack/ceilometer-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.713022 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6202e6ec-74a4-4e11-a274-51bb9ca4a211" 
path="/var/lib/kubelet/pods/6202e6ec-74a4-4e11-a274-51bb9ca4a211/volumes" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.714087 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1f6457f-ba47-4cd9-a972-0efb66c6514f" path="/var/lib/kubelet/pods/d1f6457f-ba47-4cd9-a972-0efb66c6514f/volumes" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.794974 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:17 crc kubenswrapper[4875]: I1007 08:16:17.823475 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 08:16:18 crc kubenswrapper[4875]: I1007 08:16:18.348629 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 08:16:18 crc kubenswrapper[4875]: I1007 08:16:18.356966 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:18 crc kubenswrapper[4875]: W1007 08:16:18.364707 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod453d5031_7d52_48b9_abd5_5c261297ee70.slice/crio-122303a2e4e145cdc4c0a9b1dab2bc7ad014b78648028ad6d531579c87d00354 WatchSource:0}: Error finding container 122303a2e4e145cdc4c0a9b1dab2bc7ad014b78648028ad6d531579c87d00354: Status 404 returned error can't find the container with id 122303a2e4e145cdc4c0a9b1dab2bc7ad014b78648028ad6d531579c87d00354 Oct 07 08:16:18 crc kubenswrapper[4875]: I1007 08:16:18.581341 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:18 crc kubenswrapper[4875]: I1007 08:16:18.605148 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.338306 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"453d5031-7d52-48b9-abd5-5c261297ee70","Type":"ContainerStarted","Data":"5936f810181ffb0fe7c8b35c21ecfc904047b7cb85723a74b4ffc6413a96b1a9"} Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.338729 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"453d5031-7d52-48b9-abd5-5c261297ee70","Type":"ContainerStarted","Data":"122303a2e4e145cdc4c0a9b1dab2bc7ad014b78648028ad6d531579c87d00354"} Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.343349 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerStarted","Data":"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f"} Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.343430 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerStarted","Data":"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c"} Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.343455 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerStarted","Data":"e608e6e2097cb84282d7373c2f3b177f40cef15ed63394e464518dfb26ec1edc"} Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.359106 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 07 08:16:19 crc 
kubenswrapper[4875]: I1007 08:16:19.381374 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.381349721 podStartE2EDuration="2.381349721s" podCreationTimestamp="2025-10-07 08:16:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:19.372825888 +0000 UTC m=+1204.332596441" watchObservedRunningTime="2025-10-07 08:16:19.381349721 +0000 UTC m=+1204.341120264" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.546625 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-sg8rr"] Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.548568 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.551350 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.551953 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.565187 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-sg8rr"] Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.608027 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.608125 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.608260 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.608538 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vpx2\" (UniqueName: \"kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.712980 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.713040 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.713122 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.713176 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vpx2\" (UniqueName: \"kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.725145 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.729305 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.730123 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:19 crc kubenswrapper[4875]: I1007 08:16:19.738751 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vpx2\" (UniqueName: \"kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2\") pod \"nova-cell1-cell-mapping-sg8rr\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.015537 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.356460 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"453d5031-7d52-48b9-abd5-5c261297ee70","Type":"ContainerStarted","Data":"f7f366b1d4b004e689e3ddf4ff3a09a9018096c6ebefae203799c00b4af34dec"} Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.481396 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-sg8rr"] Oct 07 08:16:20 crc kubenswrapper[4875]: W1007 08:16:20.493450 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08172c25_7af0_4bf8_a8e3_aae5b6403176.slice/crio-e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed WatchSource:0}: Error finding container e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed: Status 404 returned error can't find the container with id e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.777139 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.878814 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.879165 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="dnsmasq-dns" containerID="cri-o://8a418f7f2c6ec470bdc4ee0427bd1dfd58b0c03de2be564965a8de51bf4ff150" gracePeriod=10 Oct 07 08:16:20 crc kubenswrapper[4875]: I1007 08:16:20.893899 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.191:5353: connect: connection refused" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.374088 4875 generic.go:334] "Generic (PLEG): container finished" podID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerID="8a418f7f2c6ec470bdc4ee0427bd1dfd58b0c03de2be564965a8de51bf4ff150" exitCode=0 Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.374150 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" event={"ID":"d16f042f-caee-49b6-bd11-7d12f614dc57","Type":"ContainerDied","Data":"8a418f7f2c6ec470bdc4ee0427bd1dfd58b0c03de2be564965a8de51bf4ff150"} Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.375746 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-sg8rr" event={"ID":"08172c25-7af0-4bf8-a8e3-aae5b6403176","Type":"ContainerStarted","Data":"1486e2e795d66127280fce6db4987b955eaf0a37fc3a1ea6a0a0e4daffcc3040"} Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.375770 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-sg8rr" event={"ID":"08172c25-7af0-4bf8-a8e3-aae5b6403176","Type":"ContainerStarted","Data":"e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed"} Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.393049 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"453d5031-7d52-48b9-abd5-5c261297ee70","Type":"ContainerStarted","Data":"4139338e0e2af166577827f52805ab41ef88ba5915edf275450f7412b82fc82a"} Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.412975 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-sg8rr" podStartSLOduration=2.412954863 podStartE2EDuration="2.412954863s" podCreationTimestamp="2025-10-07 08:16:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:21.396394913 +0000 UTC m=+1206.356165456" watchObservedRunningTime="2025-10-07 08:16:21.412954863 +0000 UTC m=+1206.372725406" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.493821 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.570593 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.570708 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.570794 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.570917 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.570963 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft86t\" (UniqueName: \"kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.571015 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0\") pod \"d16f042f-caee-49b6-bd11-7d12f614dc57\" (UID: \"d16f042f-caee-49b6-bd11-7d12f614dc57\") " Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.583446 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t" (OuterVolumeSpecName: "kube-api-access-ft86t") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "kube-api-access-ft86t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.628229 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.630559 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.640353 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config" (OuterVolumeSpecName: "config") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.649291 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.674751 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.674788 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft86t\" (UniqueName: \"kubernetes.io/projected/d16f042f-caee-49b6-bd11-7d12f614dc57-kube-api-access-ft86t\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.674801 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.674813 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.674822 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.690341 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d16f042f-caee-49b6-bd11-7d12f614dc57" (UID: "d16f042f-caee-49b6-bd11-7d12f614dc57"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:16:21 crc kubenswrapper[4875]: I1007 08:16:21.776820 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d16f042f-caee-49b6-bd11-7d12f614dc57-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.407998 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"453d5031-7d52-48b9-abd5-5c261297ee70","Type":"ContainerStarted","Data":"1d9475a64f9a2396f2af8de2f2eb365b8634f24658ebe3e88eb79b8f600922cc"} Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.408679 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.417566 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" event={"ID":"d16f042f-caee-49b6-bd11-7d12f614dc57","Type":"ContainerDied","Data":"3633d29d56861a4bd7972a3e63d9b6314735f3a687792cf193fb7263e431b7b9"} Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.417676 4875 scope.go:117] "RemoveContainer" containerID="8a418f7f2c6ec470bdc4ee0427bd1dfd58b0c03de2be564965a8de51bf4ff150" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.417852 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-vp4dv" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.447117 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.957410466 podStartE2EDuration="5.447095196s" podCreationTimestamp="2025-10-07 08:16:17 +0000 UTC" firstStartedPulling="2025-10-07 08:16:18.370590377 +0000 UTC m=+1203.330360910" lastFinishedPulling="2025-10-07 08:16:21.860275097 +0000 UTC m=+1206.820045640" observedRunningTime="2025-10-07 08:16:22.437701865 +0000 UTC m=+1207.397472648" watchObservedRunningTime="2025-10-07 08:16:22.447095196 +0000 UTC m=+1207.406865739" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.460609 4875 scope.go:117] "RemoveContainer" containerID="2e4b7eafad22d04f1754a1a891cbaf3b46db13f16f7a502bbc1cf88083a7497d" Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.474022 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:16:22 crc kubenswrapper[4875]: I1007 08:16:22.487396 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-vp4dv"] Oct 07 08:16:23 crc kubenswrapper[4875]: I1007 08:16:23.715227 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" path="/var/lib/kubelet/pods/d16f042f-caee-49b6-bd11-7d12f614dc57/volumes" Oct 07 08:16:26 crc kubenswrapper[4875]: I1007 08:16:26.473922 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-sg8rr" event={"ID":"08172c25-7af0-4bf8-a8e3-aae5b6403176","Type":"ContainerDied","Data":"1486e2e795d66127280fce6db4987b955eaf0a37fc3a1ea6a0a0e4daffcc3040"} Oct 07 08:16:26 crc kubenswrapper[4875]: I1007 08:16:26.473956 4875 generic.go:334] "Generic (PLEG): container finished" podID="08172c25-7af0-4bf8-a8e3-aae5b6403176" containerID="1486e2e795d66127280fce6db4987b955eaf0a37fc3a1ea6a0a0e4daffcc3040" exitCode=0 Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.795568 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 
07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.796465 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.859561 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.952429 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data\") pod \"08172c25-7af0-4bf8-a8e3-aae5b6403176\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.952561 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vpx2\" (UniqueName: \"kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2\") pod \"08172c25-7af0-4bf8-a8e3-aae5b6403176\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.952692 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle\") pod \"08172c25-7af0-4bf8-a8e3-aae5b6403176\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.952720 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts\") pod \"08172c25-7af0-4bf8-a8e3-aae5b6403176\" (UID: \"08172c25-7af0-4bf8-a8e3-aae5b6403176\") " Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.964203 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts" (OuterVolumeSpecName: "scripts") pod "08172c25-7af0-4bf8-a8e3-aae5b6403176" (UID: "08172c25-7af0-4bf8-a8e3-aae5b6403176"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.964211 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2" (OuterVolumeSpecName: "kube-api-access-5vpx2") pod "08172c25-7af0-4bf8-a8e3-aae5b6403176" (UID: "08172c25-7af0-4bf8-a8e3-aae5b6403176"). InnerVolumeSpecName "kube-api-access-5vpx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.981316 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08172c25-7af0-4bf8-a8e3-aae5b6403176" (UID: "08172c25-7af0-4bf8-a8e3-aae5b6403176"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:27 crc kubenswrapper[4875]: I1007 08:16:27.981827 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data" (OuterVolumeSpecName: "config-data") pod "08172c25-7af0-4bf8-a8e3-aae5b6403176" (UID: "08172c25-7af0-4bf8-a8e3-aae5b6403176"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.055445 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.055482 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vpx2\" (UniqueName: \"kubernetes.io/projected/08172c25-7af0-4bf8-a8e3-aae5b6403176-kube-api-access-5vpx2\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.055494 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.055503 4875 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08172c25-7af0-4bf8-a8e3-aae5b6403176-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.504742 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-sg8rr" event={"ID":"08172c25-7af0-4bf8-a8e3-aae5b6403176","Type":"ContainerDied","Data":"e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed"} Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.504793 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e81d4cc71e33f82b6d7718ddf47ab3966cec2d80f22efc77190c5aaab13c03ed" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.504806 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-sg8rr" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.721946 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.722633 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-log" containerID="cri-o://17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c" gracePeriod=30 Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.723075 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-api" containerID="cri-o://4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f" gracePeriod=30 Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.737790 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.738053 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" containerID="cri-o://ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0" gracePeriod=30 Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.738740 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" containerID="cri-o://63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728" gracePeriod=30 Oct 07 08:16:28 crc 
kubenswrapper[4875]: I1007 08:16:28.742755 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": EOF" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.749300 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": EOF" Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.754449 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:28 crc kubenswrapper[4875]: I1007 08:16:28.754689 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerName="nova-scheduler-scheduler" containerID="cri-o://00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" gracePeriod=30 Oct 07 08:16:29 crc kubenswrapper[4875]: E1007 08:16:29.502646 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:16:29 crc kubenswrapper[4875]: E1007 08:16:29.507414 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:16:29 crc kubenswrapper[4875]: E1007 08:16:29.513186 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 08:16:29 crc kubenswrapper[4875]: E1007 08:16:29.513281 4875 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerName="nova-scheduler-scheduler" Oct 07 08:16:29 crc kubenswrapper[4875]: I1007 08:16:29.519014 4875 generic.go:334] "Generic (PLEG): container finished" podID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerID="17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c" exitCode=143 Oct 07 08:16:29 crc kubenswrapper[4875]: I1007 08:16:29.519083 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerDied","Data":"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c"} Oct 07 08:16:29 crc kubenswrapper[4875]: I1007 08:16:29.521224 4875 generic.go:334] "Generic (PLEG): container finished" podID="c56873e1-d427-407d-99f5-bde715855984" containerID="ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0" exitCode=143 Oct 07 08:16:29 crc kubenswrapper[4875]: I1007 08:16:29.521270 4875 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerDied","Data":"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0"} Oct 07 08:16:31 crc kubenswrapper[4875]: I1007 08:16:31.886992 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:50680->10.217.0.194:8775: read: connection reset by peer" Oct 07 08:16:31 crc kubenswrapper[4875]: I1007 08:16:31.886998 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:50694->10.217.0.194:8775: read: connection reset by peer" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.486222 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.552294 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data\") pod \"c56873e1-d427-407d-99f5-bde715855984\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.552515 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle\") pod \"c56873e1-d427-407d-99f5-bde715855984\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.552643 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs\") pod \"c56873e1-d427-407d-99f5-bde715855984\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.552677 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4grl\" (UniqueName: \"kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl\") pod \"c56873e1-d427-407d-99f5-bde715855984\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.552733 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs\") pod \"c56873e1-d427-407d-99f5-bde715855984\" (UID: \"c56873e1-d427-407d-99f5-bde715855984\") " Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.554012 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs" (OuterVolumeSpecName: "logs") pod "c56873e1-d427-407d-99f5-bde715855984" (UID: "c56873e1-d427-407d-99f5-bde715855984"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.559165 4875 generic.go:334] "Generic (PLEG): container finished" podID="c56873e1-d427-407d-99f5-bde715855984" containerID="63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728" exitCode=0 Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.559248 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerDied","Data":"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728"} Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.559293 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c56873e1-d427-407d-99f5-bde715855984","Type":"ContainerDied","Data":"74cfb6ce7f7d092a81c955def03700cf63dcf3c0a51868b845d5ae3b9137843d"} Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.559324 4875 scope.go:117] "RemoveContainer" containerID="63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.559626 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.561246 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl" (OuterVolumeSpecName: "kube-api-access-t4grl") pod "c56873e1-d427-407d-99f5-bde715855984" (UID: "c56873e1-d427-407d-99f5-bde715855984"). InnerVolumeSpecName "kube-api-access-t4grl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.596160 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c56873e1-d427-407d-99f5-bde715855984" (UID: "c56873e1-d427-407d-99f5-bde715855984"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.603402 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data" (OuterVolumeSpecName: "config-data") pod "c56873e1-d427-407d-99f5-bde715855984" (UID: "c56873e1-d427-407d-99f5-bde715855984"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.624227 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c56873e1-d427-407d-99f5-bde715855984" (UID: "c56873e1-d427-407d-99f5-bde715855984"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.655206 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.655245 4875 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.655257 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4grl\" (UniqueName: \"kubernetes.io/projected/c56873e1-d427-407d-99f5-bde715855984-kube-api-access-t4grl\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.655266 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c56873e1-d427-407d-99f5-bde715855984-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.655278 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c56873e1-d427-407d-99f5-bde715855984-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.673912 4875 scope.go:117] "RemoveContainer" containerID="ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.697907 4875 scope.go:117] "RemoveContainer" containerID="63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.702165 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728\": container with ID starting with 63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728 not found: ID does not exist" containerID="63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.702214 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728"} err="failed to get container status \"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728\": rpc error: code = NotFound desc = could not find container \"63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728\": container with ID starting with 63c82a09032e0dae2ca13ecbdb9f8bdec44fae1f6476248647978dfd49124728 not found: ID does not exist" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.702242 4875 scope.go:117] "RemoveContainer" containerID="ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.705966 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0\": container with ID starting with ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0 not found: ID does not exist" containerID="ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.706447 4875 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0"} err="failed to get container status \"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0\": rpc error: code = NotFound desc = could not find container \"ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0\": container with ID starting with ceed5e66a47bd6d646f02be4dc12dfb351b9ff4b9c5ca5df85ecbcfcc83455e0 not found: ID does not exist" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.905097 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.918278 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.932497 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.932953 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="init" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.932972 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="init" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.932993 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933003 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.933016 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08172c25-7af0-4bf8-a8e3-aae5b6403176" containerName="nova-manage" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933022 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="08172c25-7af0-4bf8-a8e3-aae5b6403176" containerName="nova-manage" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.933041 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933047 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" Oct 07 08:16:32 crc kubenswrapper[4875]: E1007 08:16:32.933058 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="dnsmasq-dns" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933063 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="dnsmasq-dns" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933261 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="08172c25-7af0-4bf8-a8e3-aae5b6403176" containerName="nova-manage" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933270 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d16f042f-caee-49b6-bd11-7d12f614dc57" containerName="dnsmasq-dns" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.933285 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-log" Oct 07 08:16:32 crc kubenswrapper[4875]: 
I1007 08:16:32.933302 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56873e1-d427-407d-99f5-bde715855984" containerName="nova-metadata-metadata" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.934483 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.937166 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.937477 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.951003 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.975106 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-config-data\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.975165 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.975235 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.975368 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-logs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:32 crc kubenswrapper[4875]: I1007 08:16:32.975405 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzvhd\" (UniqueName: \"kubernetes.io/projected/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-kube-api-access-mzvhd\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.077487 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-logs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.077554 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzvhd\" (UniqueName: \"kubernetes.io/projected/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-kube-api-access-mzvhd\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.077611 4875 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-config-data\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.077631 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.077665 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.078040 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-logs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.081648 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.081848 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.089006 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-config-data\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.099860 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzvhd\" (UniqueName: \"kubernetes.io/projected/47ec1bb6-1d05-4ed7-b706-6d25c4146e7d-kube-api-access-mzvhd\") pod \"nova-metadata-0\" (UID: \"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d\") " pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.252635 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.711806 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c56873e1-d427-407d-99f5-bde715855984" path="/var/lib/kubelet/pods/c56873e1-d427-407d-99f5-bde715855984/volumes" Oct 07 08:16:33 crc kubenswrapper[4875]: I1007 08:16:33.781677 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.351503 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.440312 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data\") pod \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.440566 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle\") pod \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.440607 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfzjf\" (UniqueName: \"kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf\") pod \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\" (UID: \"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.446298 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf" (OuterVolumeSpecName: "kube-api-access-qfzjf") pod "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" (UID: "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93"). InnerVolumeSpecName "kube-api-access-qfzjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.466613 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.476271 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data" (OuterVolumeSpecName: "config-data") pod "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" (UID: "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.478669 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" (UID: "fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.543541 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.543713 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkst2\" (UniqueName: \"kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.543765 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.543843 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.543872 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.544051 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle\") pod \"9f81565c-baf5-40d3-9463-d3732ed1104b\" (UID: \"9f81565c-baf5-40d3-9463-d3732ed1104b\") " Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.544590 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.544610 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfzjf\" (UniqueName: \"kubernetes.io/projected/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-kube-api-access-qfzjf\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.544627 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.546408 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs" (OuterVolumeSpecName: "logs") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.548744 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2" (OuterVolumeSpecName: "kube-api-access-hkst2") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "kube-api-access-hkst2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.580618 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data" (OuterVolumeSpecName: "config-data") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.583713 4875 generic.go:334] "Generic (PLEG): container finished" podID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" exitCode=0 Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.583770 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.583792 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93","Type":"ContainerDied","Data":"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.583914 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93","Type":"ContainerDied","Data":"de841348632b3c11537aeb5ca9ba34346573998e9e5428fc4542bb3f08f8d391"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.583956 4875 scope.go:117] "RemoveContainer" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.592110 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.597747 4875 generic.go:334] "Generic (PLEG): container finished" podID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerID="4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f" exitCode=0 Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.597827 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerDied","Data":"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.597857 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f81565c-baf5-40d3-9463-d3732ed1104b","Type":"ContainerDied","Data":"e608e6e2097cb84282d7373c2f3b177f40cef15ed63394e464518dfb26ec1edc"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.597988 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.600910 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d","Type":"ContainerStarted","Data":"be72d4982e090aed1e67b2bec59ec03ca8c75f16d067982779f620c67effab9a"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.600941 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d","Type":"ContainerStarted","Data":"b7345cbb444cf1954f4ce5a1095a93fef70451e8d863a6a69ee29861badd4ea7"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.600955 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47ec1bb6-1d05-4ed7-b706-6d25c4146e7d","Type":"ContainerStarted","Data":"6c438a091c22ed1bdb2377bf7533a51d240099b0c01f32d6fecea5eb4c5690e0"} Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.610472 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.615946 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9f81565c-baf5-40d3-9463-d3732ed1104b" (UID: "9f81565c-baf5-40d3-9463-d3732ed1104b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.633567 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.6335416240000002 podStartE2EDuration="2.633541624s" podCreationTimestamp="2025-10-07 08:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:34.628162841 +0000 UTC m=+1219.587933384" watchObservedRunningTime="2025-10-07 08:16:34.633541624 +0000 UTC m=+1219.593312167" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.636214 4875 scope.go:117] "RemoveContainer" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.639357 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8\": container with ID starting with 00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8 not found: ID does not exist" containerID="00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.639423 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8"} err="failed to get container status \"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8\": rpc error: code = NotFound desc = could not find container \"00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8\": container with ID starting with 00bfe9245c4977eb15e08016b4ad98460fceea28996dbdab65fbd2537074ccb8 not found: ID does not exist" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.639449 4875 scope.go:117] "RemoveContainer" containerID="4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.650611 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.651027 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkst2\" (UniqueName: \"kubernetes.io/projected/9f81565c-baf5-40d3-9463-d3732ed1104b-kube-api-access-hkst2\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.651039 4875 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f81565c-baf5-40d3-9463-d3732ed1104b-logs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.651051 4875 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.651063 4875 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.651072 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9f81565c-baf5-40d3-9463-d3732ed1104b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.654750 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.665787 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.676522 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.676973 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-api" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.676992 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-api" Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.677023 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerName="nova-scheduler-scheduler" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677030 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerName="nova-scheduler-scheduler" Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.677049 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-log" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677057 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-log" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677220 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-api" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677254 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" containerName="nova-scheduler-scheduler" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677267 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" containerName="nova-api-log" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.677982 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.680627 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.690269 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.693864 4875 scope.go:117] "RemoveContainer" containerID="17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.718145 4875 scope.go:117] "RemoveContainer" containerID="4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f" Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.720857 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f\": container with ID starting with 4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f not found: ID does not exist" containerID="4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.720988 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f"} err="failed to get container status \"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f\": rpc error: code = NotFound desc = could not find container \"4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f\": container with ID starting with 4b3fbe1b2b70b0687f5ccb59de2f319eec054023e430c776d3d0cf6c28686f6f not found: ID does not exist" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.721083 4875 scope.go:117] "RemoveContainer" containerID="17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c" Oct 07 08:16:34 crc kubenswrapper[4875]: E1007 08:16:34.724139 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c\": container with ID starting with 17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c not found: ID does not exist" containerID="17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.724233 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c"} err="failed to get container status \"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c\": rpc error: code = NotFound desc = could not find container \"17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c\": container with ID starting with 17b4f82a3009d9461017dbba2257e34ecf42c3c06d2c6188b473ff138064c44c not found: ID does not exist" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.753869 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-config-data\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.754054 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-2jdxf\" (UniqueName: \"kubernetes.io/projected/d49ca1ef-7891-416e-9d67-d17b4b031624-kube-api-access-2jdxf\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.754100 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.856513 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jdxf\" (UniqueName: \"kubernetes.io/projected/d49ca1ef-7891-416e-9d67-d17b4b031624-kube-api-access-2jdxf\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.856918 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.857213 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-config-data\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.867086 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.869104 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d49ca1ef-7891-416e-9d67-d17b4b031624-config-data\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.873025 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jdxf\" (UniqueName: \"kubernetes.io/projected/d49ca1ef-7891-416e-9d67-d17b4b031624-kube-api-access-2jdxf\") pod \"nova-scheduler-0\" (UID: \"d49ca1ef-7891-416e-9d67-d17b4b031624\") " pod="openstack/nova-scheduler-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.943242 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.956647 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.976707 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.979008 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.983615 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.983839 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 07 08:16:34 crc kubenswrapper[4875]: I1007 08:16:34.983982 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.021984 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.041318 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.060681 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-public-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.060755 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-config-data\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.060856 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.061002 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3774daf2-c509-494e-81f3-9cadf5a30459-logs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.061370 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gwdd\" (UniqueName: \"kubernetes.io/projected/3774daf2-c509-494e-81f3-9cadf5a30459-kube-api-access-7gwdd\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.061418 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164131 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3774daf2-c509-494e-81f3-9cadf5a30459-logs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164603 4875 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-7gwdd\" (UniqueName: \"kubernetes.io/projected/3774daf2-c509-494e-81f3-9cadf5a30459-kube-api-access-7gwdd\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164653 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164677 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-public-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164697 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-config-data\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164748 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.164815 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3774daf2-c509-494e-81f3-9cadf5a30459-logs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.172237 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-public-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.172796 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.179837 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-config-data\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.181807 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3774daf2-c509-494e-81f3-9cadf5a30459-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.184175 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gwdd\" (UniqueName: 
\"kubernetes.io/projected/3774daf2-c509-494e-81f3-9cadf5a30459-kube-api-access-7gwdd\") pod \"nova-api-0\" (UID: \"3774daf2-c509-494e-81f3-9cadf5a30459\") " pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.464826 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.505509 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 08:16:35 crc kubenswrapper[4875]: W1007 08:16:35.514527 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd49ca1ef_7891_416e_9d67_d17b4b031624.slice/crio-ca2cec7061bd08d551bc3d1b80bbbf5fae399d670bf2a0854b79221e93e2cba5 WatchSource:0}: Error finding container ca2cec7061bd08d551bc3d1b80bbbf5fae399d670bf2a0854b79221e93e2cba5: Status 404 returned error can't find the container with id ca2cec7061bd08d551bc3d1b80bbbf5fae399d670bf2a0854b79221e93e2cba5 Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.623130 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d49ca1ef-7891-416e-9d67-d17b4b031624","Type":"ContainerStarted","Data":"ca2cec7061bd08d551bc3d1b80bbbf5fae399d670bf2a0854b79221e93e2cba5"} Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.710660 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f81565c-baf5-40d3-9463-d3732ed1104b" path="/var/lib/kubelet/pods/9f81565c-baf5-40d3-9463-d3732ed1104b/volumes" Oct 07 08:16:35 crc kubenswrapper[4875]: I1007 08:16:35.711327 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93" path="/var/lib/kubelet/pods/fe3c3ee8-4355-42b1-9ae1-bdb1cf1e4b93/volumes" Oct 07 08:16:36 crc kubenswrapper[4875]: I1007 08:16:36.346244 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 08:16:36 crc kubenswrapper[4875]: I1007 08:16:36.636357 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3774daf2-c509-494e-81f3-9cadf5a30459","Type":"ContainerStarted","Data":"2effbd0ddcdaf45a1eab23f18264020bf812849ce7c62877d5c546177d955f22"} Oct 07 08:16:36 crc kubenswrapper[4875]: I1007 08:16:36.636789 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3774daf2-c509-494e-81f3-9cadf5a30459","Type":"ContainerStarted","Data":"9cafc4b665fcc7f72f90eef8718d99c8ddb4f2186699a6cc8e1e527ea20f5f8c"} Oct 07 08:16:36 crc kubenswrapper[4875]: I1007 08:16:36.638365 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d49ca1ef-7891-416e-9d67-d17b4b031624","Type":"ContainerStarted","Data":"5688b84781846356d73dd50991b7b27448845008526f4b260971b854a0c7ddb4"} Oct 07 08:16:36 crc kubenswrapper[4875]: I1007 08:16:36.659286 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.659270347 podStartE2EDuration="2.659270347s" podCreationTimestamp="2025-10-07 08:16:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:36.657447769 +0000 UTC m=+1221.617218322" watchObservedRunningTime="2025-10-07 08:16:36.659270347 +0000 UTC m=+1221.619040890" Oct 07 08:16:37 crc kubenswrapper[4875]: I1007 08:16:37.650838 4875 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-api-0" event={"ID":"3774daf2-c509-494e-81f3-9cadf5a30459","Type":"ContainerStarted","Data":"5c970e52861aac5be303b3e59ace0dee964ac7af5c14c1edc5fbb4fe80d8c29a"} Oct 07 08:16:37 crc kubenswrapper[4875]: I1007 08:16:37.685533 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.685505896 podStartE2EDuration="3.685505896s" podCreationTimestamp="2025-10-07 08:16:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:16:37.67120126 +0000 UTC m=+1222.630971823" watchObservedRunningTime="2025-10-07 08:16:37.685505896 +0000 UTC m=+1222.645276439" Oct 07 08:16:38 crc kubenswrapper[4875]: I1007 08:16:38.253503 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 08:16:38 crc kubenswrapper[4875]: I1007 08:16:38.253640 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 08:16:40 crc kubenswrapper[4875]: I1007 08:16:40.023832 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 08:16:43 crc kubenswrapper[4875]: I1007 08:16:43.253944 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 08:16:43 crc kubenswrapper[4875]: I1007 08:16:43.254602 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 08:16:44 crc kubenswrapper[4875]: I1007 08:16:44.269013 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="47ec1bb6-1d05-4ed7-b706-6d25c4146e7d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:44 crc kubenswrapper[4875]: I1007 08:16:44.269013 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="47ec1bb6-1d05-4ed7-b706-6d25c4146e7d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:45 crc kubenswrapper[4875]: I1007 08:16:45.024292 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 08:16:45 crc kubenswrapper[4875]: I1007 08:16:45.053083 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 08:16:45 crc kubenswrapper[4875]: I1007 08:16:45.466209 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:16:45 crc kubenswrapper[4875]: I1007 08:16:45.466304 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 08:16:45 crc kubenswrapper[4875]: I1007 08:16:45.758768 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 08:16:46 crc kubenswrapper[4875]: I1007 08:16:46.479122 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3774daf2-c509-494e-81f3-9cadf5a30459" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:46 
crc kubenswrapper[4875]: I1007 08:16:46.479159 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3774daf2-c509-494e-81f3-9cadf5a30459" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 08:16:47 crc kubenswrapper[4875]: I1007 08:16:47.838243 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 08:16:53 crc kubenswrapper[4875]: I1007 08:16:53.263024 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 08:16:53 crc kubenswrapper[4875]: I1007 08:16:53.263856 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 08:16:53 crc kubenswrapper[4875]: I1007 08:16:53.271297 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 08:16:53 crc kubenswrapper[4875]: I1007 08:16:53.271842 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.472948 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.474691 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.477691 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.483383 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.830037 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 08:16:55 crc kubenswrapper[4875]: I1007 08:16:55.837961 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 08:17:03 crc kubenswrapper[4875]: I1007 08:17:03.634371 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:04 crc kubenswrapper[4875]: I1007 08:17:04.672218 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:08 crc kubenswrapper[4875]: I1007 08:17:08.410001 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="rabbitmq" containerID="cri-o://541c994fd394d6919cd0ba24e91df28102524f45378c6b8fac295753e60b09ba" gracePeriod=604796 Oct 07 08:17:09 crc kubenswrapper[4875]: I1007 08:17:09.817094 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="rabbitmq" containerID="cri-o://6f735bc5958aa92a6b5ff7e984af963a2d9883c708b83e365b76df8870a80031" gracePeriod=604795 Oct 07 08:17:14 crc kubenswrapper[4875]: I1007 08:17:14.911307 4875 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 
08:17:15.045116 4875 generic.go:334] "Generic (PLEG): container finished" podID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerID="541c994fd394d6919cd0ba24e91df28102524f45378c6b8fac295753e60b09ba" exitCode=0 Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.045605 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerDied","Data":"541c994fd394d6919cd0ba24e91df28102524f45378c6b8fac295753e60b09ba"} Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.045673 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"06c908ee-f087-4e43-904f-5cc1e01a2464","Type":"ContainerDied","Data":"d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a"} Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.045715 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9f59a48e106895daf6c55d2ad91d258ceda166df17cc233fcdab1cc4d24b26a" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.103657 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200079 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200178 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200236 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200277 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200348 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200375 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200442 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb85f\" (UniqueName: 
\"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200469 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200518 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200553 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.200577 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins\") pod \"06c908ee-f087-4e43-904f-5cc1e01a2464\" (UID: \"06c908ee-f087-4e43-904f-5cc1e01a2464\") " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.201569 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.201635 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.201813 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.207311 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.213660 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info" (OuterVolumeSpecName: "pod-info") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.213713 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.219711 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.233163 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f" (OuterVolumeSpecName: "kube-api-access-jb85f") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "kube-api-access-jb85f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.242652 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data" (OuterVolumeSpecName: "config-data") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.292032 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf" (OuterVolumeSpecName: "server-conf") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.310758 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311026 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311102 4875 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/06c908ee-f087-4e43-904f-5cc1e01a2464-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311167 4875 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311230 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311316 4875 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/06c908ee-f087-4e43-904f-5cc1e01a2464-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311426 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311501 4875 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/06c908ee-f087-4e43-904f-5cc1e01a2464-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311565 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb85f\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-kube-api-access-jb85f\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.311650 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.342404 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.357593 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "06c908ee-f087-4e43-904f-5cc1e01a2464" (UID: "06c908ee-f087-4e43-904f-5cc1e01a2464"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.413850 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:15 crc kubenswrapper[4875]: I1007 08:17:15.413893 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/06c908ee-f087-4e43-904f-5cc1e01a2464-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.062611 4875 generic.go:334] "Generic (PLEG): container finished" podID="169a8fe1-831d-46f5-b939-e1507c89453e" containerID="6f735bc5958aa92a6b5ff7e984af963a2d9883c708b83e365b76df8870a80031" exitCode=0 Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.064090 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.065051 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerDied","Data":"6f735bc5958aa92a6b5ff7e984af963a2d9883c708b83e365b76df8870a80031"} Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.133163 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.144141 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.193769 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:16 crc kubenswrapper[4875]: E1007 08:17:16.194344 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="rabbitmq" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.194365 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="rabbitmq" Oct 07 08:17:16 crc kubenswrapper[4875]: E1007 08:17:16.194408 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="setup-container" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.194415 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="setup-container" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.194611 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" containerName="rabbitmq" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.195750 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.198323 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-d8zjz" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.198544 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.199150 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.199337 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.199507 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.199931 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.203728 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.208194 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.334655 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zc7d\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-kube-api-access-6zc7d\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.334932 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d60697-89d7-42a0-9457-efef02815764-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335030 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335120 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335243 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d60697-89d7-42a0-9457-efef02815764-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335333 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335403 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335472 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335554 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335675 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.335769 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439051 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439118 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439137 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d60697-89d7-42a0-9457-efef02815764-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439175 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " 
pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439198 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439219 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439251 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439353 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439398 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439438 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zc7d\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-kube-api-access-6zc7d\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.439488 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d60697-89d7-42a0-9457-efef02815764-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.440421 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.440780 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.441332 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.441333 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.442133 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.444005 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d60697-89d7-42a0-9457-efef02815764-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.446638 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d60697-89d7-42a0-9457-efef02815764-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.447942 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.457104 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.463238 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d60697-89d7-42a0-9457-efef02815764-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.467033 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zc7d\" (UniqueName: \"kubernetes.io/projected/e2d60697-89d7-42a0-9457-efef02815764-kube-api-access-6zc7d\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.497149 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d60697-89d7-42a0-9457-efef02815764\") " pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.530470 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.547066 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645162 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645246 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645282 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645312 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645387 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzzf5\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645453 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645551 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645583 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645610 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645722 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.645751 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret\") pod \"169a8fe1-831d-46f5-b939-e1507c89453e\" (UID: \"169a8fe1-831d-46f5-b939-e1507c89453e\") " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.647557 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.648253 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.650056 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.651521 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info" (OuterVolumeSpecName: "pod-info") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.651745 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5" (OuterVolumeSpecName: "kube-api-access-rzzf5") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "kube-api-access-rzzf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.651821 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.661433 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.665155 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.682584 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data" (OuterVolumeSpecName: "config-data") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.704816 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf" (OuterVolumeSpecName: "server-conf") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.748723 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749046 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749104 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749157 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749246 4875 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/169a8fe1-831d-46f5-b939-e1507c89453e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749367 4875 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749424 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749474 4875 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/169a8fe1-831d-46f5-b939-e1507c89453e-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749522 4875 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/169a8fe1-831d-46f5-b939-e1507c89453e-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.749579 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzzf5\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-kube-api-access-rzzf5\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.777465 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.798938 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "169a8fe1-831d-46f5-b939-e1507c89453e" (UID: "169a8fe1-831d-46f5-b939-e1507c89453e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.862844 4875 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/169a8fe1-831d-46f5-b939-e1507c89453e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:16 crc kubenswrapper[4875]: I1007 08:17:16.862893 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.075367 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"169a8fe1-831d-46f5-b939-e1507c89453e","Type":"ContainerDied","Data":"a5a5892ee262ccef7372f29a25607fc3aef0237513b20cff031c3eea6883ed83"} Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.075438 4875 scope.go:117] "RemoveContainer" containerID="6f735bc5958aa92a6b5ff7e984af963a2d9883c708b83e365b76df8870a80031" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.075436 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.091045 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.102734 4875 scope.go:117] "RemoveContainer" containerID="74829d972ae0ba9f879c590cdb2f186562e49f7a189fcb7398f145b5eb9ec846" Oct 07 08:17:17 crc kubenswrapper[4875]: W1007 08:17:17.111493 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2d60697_89d7_42a0_9457_efef02815764.slice/crio-a466797163ff34fa73d6acf4331b0290edd4265a195261b7df6a8d5d7dec0b81 WatchSource:0}: Error finding container a466797163ff34fa73d6acf4331b0290edd4265a195261b7df6a8d5d7dec0b81: Status 404 returned error can't find the container with id a466797163ff34fa73d6acf4331b0290edd4265a195261b7df6a8d5d7dec0b81 Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.136761 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.150040 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.160062 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:17 crc kubenswrapper[4875]: E1007 08:17:17.160525 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="rabbitmq" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.160548 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="rabbitmq" Oct 07 08:17:17 crc kubenswrapper[4875]: E1007 08:17:17.160576 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="setup-container" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.160583 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="setup-container" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.163045 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" containerName="rabbitmq" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.165256 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169032 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169274 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169064 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169124 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169227 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-bkgpg" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169793 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.169995 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.174608 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271185 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271678 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnzfv\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-kube-api-access-tnzfv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271715 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271740 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bff65c7-6542-4501-90d2-fedc97d9a9d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271778 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271872 4875 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271943 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.271974 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.272031 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.272067 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bff65c7-6542-4501-90d2-fedc97d9a9d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.272137 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.373991 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374058 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnzfv\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-kube-api-access-tnzfv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374097 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374125 4875 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bff65c7-6542-4501-90d2-fedc97d9a9d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374157 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374203 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374309 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374340 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374392 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374426 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bff65c7-6542-4501-90d2-fedc97d9a9d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.374474 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.375209 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.375415 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.375952 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.376098 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.376125 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.376460 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bff65c7-6542-4501-90d2-fedc97d9a9d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.380274 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bff65c7-6542-4501-90d2-fedc97d9a9d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.380467 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.380499 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.380625 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bff65c7-6542-4501-90d2-fedc97d9a9d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.394355 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnzfv\" (UniqueName: \"kubernetes.io/projected/8bff65c7-6542-4501-90d2-fedc97d9a9d7-kube-api-access-tnzfv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.406231 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8bff65c7-6542-4501-90d2-fedc97d9a9d7\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.499379 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.719744 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c908ee-f087-4e43-904f-5cc1e01a2464" path="/var/lib/kubelet/pods/06c908ee-f087-4e43-904f-5cc1e01a2464/volumes" Oct 07 08:17:17 crc kubenswrapper[4875]: I1007 08:17:17.721492 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169a8fe1-831d-46f5-b939-e1507c89453e" path="/var/lib/kubelet/pods/169a8fe1-831d-46f5-b939-e1507c89453e/volumes" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.007523 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.087585 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d60697-89d7-42a0-9457-efef02815764","Type":"ContainerStarted","Data":"a466797163ff34fa73d6acf4331b0290edd4265a195261b7df6a8d5d7dec0b81"} Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.089905 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8bff65c7-6542-4501-90d2-fedc97d9a9d7","Type":"ContainerStarted","Data":"4670403a821b29db016cf567b75aac0f395adb7197f99c05cda68c3c3f2501a7"} Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.150621 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.152514 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.158591 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.177378 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.293867 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294041 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294079 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294106 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294227 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294445 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.294492 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jkvx\" (UniqueName: \"kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396250 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: 
\"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396303 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396322 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396354 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396375 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jkvx\" (UniqueName: \"kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396470 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.396530 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.397459 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.397719 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.397929 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " 
pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.397940 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.397999 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.398609 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.434782 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jkvx\" (UniqueName: \"kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx\") pod \"dnsmasq-dns-d558885bc-fpsjt\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:18 crc kubenswrapper[4875]: E1007 08:17:18.557479 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:17:18 crc kubenswrapper[4875]: I1007 08:17:18.635778 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:19 crc kubenswrapper[4875]: I1007 08:17:19.104118 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d60697-89d7-42a0-9457-efef02815764","Type":"ContainerStarted","Data":"3448e5b593d30c1da84fba72a6bb315bcb86bb18c9c0c0f0186d61d15d69488c"} Oct 07 08:17:19 crc kubenswrapper[4875]: I1007 08:17:19.163830 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:20 crc kubenswrapper[4875]: I1007 08:17:20.114474 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerStarted","Data":"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591"} Oct 07 08:17:20 crc kubenswrapper[4875]: I1007 08:17:20.114871 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerStarted","Data":"c3215f766b584683b85063a28e77ca0e73c01b9342dba1a44c0afcf1b11b3bb2"} Oct 07 08:17:20 crc kubenswrapper[4875]: I1007 08:17:20.117481 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8bff65c7-6542-4501-90d2-fedc97d9a9d7","Type":"ContainerStarted","Data":"9cd331d7e89e37ebf1fda9dedb99c4ddd45e9906a4e9d85c8fe569766a379252"} Oct 07 08:17:21 crc kubenswrapper[4875]: I1007 08:17:21.133448 4875 generic.go:334] "Generic (PLEG): container finished" podID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerID="415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591" exitCode=0 Oct 07 08:17:21 crc kubenswrapper[4875]: I1007 08:17:21.133868 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerDied","Data":"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591"} Oct 07 08:17:22 crc kubenswrapper[4875]: I1007 08:17:22.147713 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerStarted","Data":"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541"} Oct 07 08:17:22 crc kubenswrapper[4875]: I1007 08:17:22.148181 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:22 crc kubenswrapper[4875]: I1007 08:17:22.174304 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" podStartSLOduration=4.174270385 podStartE2EDuration="4.174270385s" podCreationTimestamp="2025-10-07 08:17:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:17:22.165712891 +0000 UTC m=+1267.125483474" watchObservedRunningTime="2025-10-07 08:17:22.174270385 +0000 UTC m=+1267.134040978" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.637123 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.734807 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.735112 4875 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="dnsmasq-dns" containerID="cri-o://25acfa43f917e4a4c187cae67cea596c56cfcd3e883b78c57c3f7215f897f29f" gracePeriod=10 Oct 07 08:17:28 crc kubenswrapper[4875]: E1007 08:17:28.849914 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.873059 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-5gt8q"] Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.881425 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.902475 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-5gt8q"] Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.933561 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.934058 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.934090 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-config\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.934124 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.934262 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc kubenswrapper[4875]: I1007 08:17:28.934294 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:28 crc 
kubenswrapper[4875]: I1007 08:17:28.934333 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6p76\" (UniqueName: \"kubernetes.io/projected/f505e712-23fa-4ef3-b464-591427bea934-kube-api-access-g6p76\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.035633 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.036943 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-config\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.036984 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.037056 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.037087 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.037127 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6p76\" (UniqueName: \"kubernetes.io/projected/f505e712-23fa-4ef3-b464-591427bea934-kube-api-access-g6p76\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.037177 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.037714 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc 
kubenswrapper[4875]: I1007 08:17:29.036829 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.038332 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-config\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.038823 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.039375 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.039899 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f505e712-23fa-4ef3-b464-591427bea934-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.064447 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6p76\" (UniqueName: \"kubernetes.io/projected/f505e712-23fa-4ef3-b464-591427bea934-kube-api-access-g6p76\") pod \"dnsmasq-dns-78c64bc9c5-5gt8q\" (UID: \"f505e712-23fa-4ef3-b464-591427bea934\") " pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.221492 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.230268 4875 generic.go:334] "Generic (PLEG): container finished" podID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerID="25acfa43f917e4a4c187cae67cea596c56cfcd3e883b78c57c3f7215f897f29f" exitCode=0 Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.230335 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" event={"ID":"fc21aba7-0a91-4583-9c30-7b5f8efcb165","Type":"ContainerDied","Data":"25acfa43f917e4a4c187cae67cea596c56cfcd3e883b78c57c3f7215f897f29f"} Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.230371 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" event={"ID":"fc21aba7-0a91-4583-9c30-7b5f8efcb165","Type":"ContainerDied","Data":"369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e"} Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.230386 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="369377f7f9bafe82f60f9de7797863824c37799a4879394a03d2fd0b13469c8e" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.320639 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.447272 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.447353 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.447513 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.447642 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2df8f\" (UniqueName: \"kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.447761 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.448022 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb\") pod \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\" (UID: \"fc21aba7-0a91-4583-9c30-7b5f8efcb165\") " Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.455146 4875 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f" (OuterVolumeSpecName: "kube-api-access-2df8f") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "kube-api-access-2df8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.506795 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config" (OuterVolumeSpecName: "config") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.508436 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.517483 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.519866 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.525807 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc21aba7-0a91-4583-9c30-7b5f8efcb165" (UID: "fc21aba7-0a91-4583-9c30-7b5f8efcb165"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.553981 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.554253 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.554338 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.554392 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.554444 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2df8f\" (UniqueName: \"kubernetes.io/projected/fc21aba7-0a91-4583-9c30-7b5f8efcb165-kube-api-access-2df8f\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.554536 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc21aba7-0a91-4583-9c30-7b5f8efcb165-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:29 crc kubenswrapper[4875]: I1007 08:17:29.712259 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-5gt8q"] Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.242545 4875 generic.go:334] "Generic (PLEG): container finished" podID="f505e712-23fa-4ef3-b464-591427bea934" containerID="eb2276e5377f4d28a200a6e1b660a6deb460b96ab9a7e20db73eccaaab40c6f3" exitCode=0 Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.242628 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" event={"ID":"f505e712-23fa-4ef3-b464-591427bea934","Type":"ContainerDied","Data":"eb2276e5377f4d28a200a6e1b660a6deb460b96ab9a7e20db73eccaaab40c6f3"} Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.243131 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-bpsf7" Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.243200 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" event={"ID":"f505e712-23fa-4ef3-b464-591427bea934","Type":"ContainerStarted","Data":"b8f2bcdca66209187d87f59e705f66148f3100a343522b99d262c286195b291f"} Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.303060 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:17:30 crc kubenswrapper[4875]: I1007 08:17:30.315310 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-bpsf7"] Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.220654 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.221112 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.255623 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" event={"ID":"f505e712-23fa-4ef3-b464-591427bea934","Type":"ContainerStarted","Data":"c8ac37b6e5394b924a2b3f3ac6526e37fb0c28f0bf3fdac520caf138f0092d76"} Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.256022 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.281100 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" podStartSLOduration=3.281081844 podStartE2EDuration="3.281081844s" podCreationTimestamp="2025-10-07 08:17:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:17:31.275615669 +0000 UTC m=+1276.235386222" watchObservedRunningTime="2025-10-07 08:17:31.281081844 +0000 UTC m=+1276.240852387" Oct 07 08:17:31 crc kubenswrapper[4875]: I1007 08:17:31.708499 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" path="/var/lib/kubelet/pods/fc21aba7-0a91-4583-9c30-7b5f8efcb165/volumes" Oct 07 08:17:39 crc kubenswrapper[4875]: E1007 08:17:39.130673 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.224231 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78c64bc9c5-5gt8q" Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.302494 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.303182 4875 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="dnsmasq-dns" containerID="cri-o://1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541" gracePeriod=10 Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.772296 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890462 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jkvx\" (UniqueName: \"kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890510 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890540 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890577 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890630 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890647 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.890760 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc\") pod \"4277aac2-b1ae-485c-a565-5d0b31d4db32\" (UID: \"4277aac2-b1ae-485c-a565-5d0b31d4db32\") " Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.959178 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx" (OuterVolumeSpecName: "kube-api-access-8jkvx") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "kube-api-access-8jkvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:17:39 crc kubenswrapper[4875]: I1007 08:17:39.971109 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:39.996271 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jkvx\" (UniqueName: \"kubernetes.io/projected/4277aac2-b1ae-485c-a565-5d0b31d4db32-kube-api-access-8jkvx\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:39.996312 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.052301 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.068325 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.073865 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config" (OuterVolumeSpecName: "config") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.075674 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.079346 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "4277aac2-b1ae-485c-a565-5d0b31d4db32" (UID: "4277aac2-b1ae-485c-a565-5d0b31d4db32"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.098461 4875 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.098493 4875 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.098503 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.098514 4875 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.098522 4875 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4277aac2-b1ae-485c-a565-5d0b31d4db32-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.354658 4875 generic.go:334] "Generic (PLEG): container finished" podID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerID="1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541" exitCode=0 Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.354706 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerDied","Data":"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541"} Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.354728 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.354751 4875 scope.go:117] "RemoveContainer" containerID="1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.354739 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-fpsjt" event={"ID":"4277aac2-b1ae-485c-a565-5d0b31d4db32","Type":"ContainerDied","Data":"c3215f766b584683b85063a28e77ca0e73c01b9342dba1a44c0afcf1b11b3bb2"} Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.375907 4875 scope.go:117] "RemoveContainer" containerID="415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.401174 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.409476 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-fpsjt"] Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.417855 4875 scope.go:117] "RemoveContainer" containerID="1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541" Oct 07 08:17:40 crc kubenswrapper[4875]: E1007 08:17:40.418427 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541\": container with ID starting with 1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541 not found: ID does not exist" containerID="1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.418479 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541"} err="failed to get container status \"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541\": rpc error: code = NotFound desc = could not find container \"1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541\": container with ID starting with 1e14407d17296112d97e04877c1bcc1407b46efa24520230ecddecb1a6ba4541 not found: ID does not exist" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.418514 4875 scope.go:117] "RemoveContainer" containerID="415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591" Oct 07 08:17:40 crc kubenswrapper[4875]: E1007 08:17:40.418863 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591\": container with ID starting with 415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591 not found: ID does not exist" containerID="415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591" Oct 07 08:17:40 crc kubenswrapper[4875]: I1007 08:17:40.418915 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591"} err="failed to get container status \"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591\": rpc error: code = NotFound desc = could not find container \"415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591\": container with ID starting with 415f0ef6d68b46bb72116850f69531a27752fa36c1986d8708d4dab7a64cb591 not found: ID does not exist" Oct 07 08:17:41 
crc kubenswrapper[4875]: I1007 08:17:41.723777 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" path="/var/lib/kubelet/pods/4277aac2-b1ae-485c-a565-5d0b31d4db32/volumes" Oct 07 08:17:49 crc kubenswrapper[4875]: E1007 08:17:49.366167 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:17:51 crc kubenswrapper[4875]: I1007 08:17:51.468545 4875 generic.go:334] "Generic (PLEG): container finished" podID="e2d60697-89d7-42a0-9457-efef02815764" containerID="3448e5b593d30c1da84fba72a6bb315bcb86bb18c9c0c0f0186d61d15d69488c" exitCode=0 Oct 07 08:17:51 crc kubenswrapper[4875]: I1007 08:17:51.468650 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d60697-89d7-42a0-9457-efef02815764","Type":"ContainerDied","Data":"3448e5b593d30c1da84fba72a6bb315bcb86bb18c9c0c0f0186d61d15d69488c"} Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.295710 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4"] Oct 07 08:17:52 crc kubenswrapper[4875]: E1007 08:17:52.296518 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296538 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: E1007 08:17:52.296581 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="init" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296597 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="init" Oct 07 08:17:52 crc kubenswrapper[4875]: E1007 08:17:52.296612 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="init" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296619 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="init" Oct 07 08:17:52 crc kubenswrapper[4875]: E1007 08:17:52.296634 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296651 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296856 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc21aba7-0a91-4583-9c30-7b5f8efcb165" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.296911 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="4277aac2-b1ae-485c-a565-5d0b31d4db32" containerName="dnsmasq-dns" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.297541 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.300603 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.300928 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.301087 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.301717 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.315856 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4"] Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.398001 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdf9r\" (UniqueName: \"kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.398055 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.398120 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.398173 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.483662 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d60697-89d7-42a0-9457-efef02815764","Type":"ContainerStarted","Data":"43e9b9cd793b556ef8bc1669ce89ec35599d3a2187d9472cd773f34c7a0bd68c"} Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.484368 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.486182 4875 generic.go:334] "Generic (PLEG): container finished" podID="8bff65c7-6542-4501-90d2-fedc97d9a9d7" containerID="9cd331d7e89e37ebf1fda9dedb99c4ddd45e9906a4e9d85c8fe569766a379252" 
exitCode=0 Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.486216 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8bff65c7-6542-4501-90d2-fedc97d9a9d7","Type":"ContainerDied","Data":"9cd331d7e89e37ebf1fda9dedb99c4ddd45e9906a4e9d85c8fe569766a379252"} Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.500021 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.500080 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.500273 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdf9r\" (UniqueName: \"kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.500304 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.507287 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.507680 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.507742 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.523307 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdf9r\" (UniqueName: 
\"kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.536251 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.53623223 podStartE2EDuration="36.53623223s" podCreationTimestamp="2025-10-07 08:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:17:52.531571482 +0000 UTC m=+1297.491342055" watchObservedRunningTime="2025-10-07 08:17:52.53623223 +0000 UTC m=+1297.496002773" Oct 07 08:17:52 crc kubenswrapper[4875]: I1007 08:17:52.675675 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:17:53 crc kubenswrapper[4875]: I1007 08:17:53.216788 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4"] Oct 07 08:17:53 crc kubenswrapper[4875]: W1007 08:17:53.222830 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23730b76_30ac_47bf_8043_3c713a209e1e.slice/crio-947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf WatchSource:0}: Error finding container 947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf: Status 404 returned error can't find the container with id 947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf Oct 07 08:17:53 crc kubenswrapper[4875]: I1007 08:17:53.226237 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:17:53 crc kubenswrapper[4875]: I1007 08:17:53.496676 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" event={"ID":"23730b76-30ac-47bf-8043-3c713a209e1e","Type":"ContainerStarted","Data":"947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf"} Oct 07 08:17:53 crc kubenswrapper[4875]: I1007 08:17:53.507088 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8bff65c7-6542-4501-90d2-fedc97d9a9d7","Type":"ContainerStarted","Data":"2153669f391753cec36f71ffbf400241aca680dd8c0ad5bc0b12918b369768f2"} Oct 07 08:17:53 crc kubenswrapper[4875]: I1007 08:17:53.539993 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.539972973 podStartE2EDuration="36.539972973s" podCreationTimestamp="2025-10-07 08:17:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:17:53.531026739 +0000 UTC m=+1298.490797292" watchObservedRunningTime="2025-10-07 08:17:53.539972973 +0000 UTC m=+1298.499743516" Oct 07 08:17:57 crc kubenswrapper[4875]: I1007 08:17:57.500194 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:17:59 crc kubenswrapper[4875]: E1007 08:17:59.643161 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:18:01 crc kubenswrapper[4875]: I1007 08:18:01.221442 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:18:01 crc kubenswrapper[4875]: I1007 08:18:01.222063 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:18:03 crc kubenswrapper[4875]: I1007 08:18:03.614042 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" event={"ID":"23730b76-30ac-47bf-8043-3c713a209e1e","Type":"ContainerStarted","Data":"14f7fff263d9ae5565f13f7d08f6b0bf8ed92a6c01176474903592c667c7a1b8"} Oct 07 08:18:03 crc kubenswrapper[4875]: I1007 08:18:03.649516 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" podStartSLOduration=2.394165172 podStartE2EDuration="11.649494301s" podCreationTimestamp="2025-10-07 08:17:52 +0000 UTC" firstStartedPulling="2025-10-07 08:17:53.226044452 +0000 UTC m=+1298.185814995" lastFinishedPulling="2025-10-07 08:18:02.481373581 +0000 UTC m=+1307.441144124" observedRunningTime="2025-10-07 08:18:03.641376903 +0000 UTC m=+1308.601147446" watchObservedRunningTime="2025-10-07 08:18:03.649494301 +0000 UTC m=+1308.609264844" Oct 07 08:18:06 crc kubenswrapper[4875]: I1007 08:18:06.533170 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 07 08:18:07 crc kubenswrapper[4875]: I1007 08:18:07.503161 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 08:18:09 crc kubenswrapper[4875]: E1007 08:18:09.892712 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c908ee_f087_4e43_904f_5cc1e01a2464.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:18:14 crc kubenswrapper[4875]: I1007 08:18:14.734366 4875 generic.go:334] "Generic (PLEG): container finished" podID="23730b76-30ac-47bf-8043-3c713a209e1e" containerID="14f7fff263d9ae5565f13f7d08f6b0bf8ed92a6c01176474903592c667c7a1b8" exitCode=0 Oct 07 08:18:14 crc kubenswrapper[4875]: I1007 08:18:14.734519 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" event={"ID":"23730b76-30ac-47bf-8043-3c713a209e1e","Type":"ContainerDied","Data":"14f7fff263d9ae5565f13f7d08f6b0bf8ed92a6c01176474903592c667c7a1b8"} Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.155333 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.295261 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle\") pod \"23730b76-30ac-47bf-8043-3c713a209e1e\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.295315 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdf9r\" (UniqueName: \"kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r\") pod \"23730b76-30ac-47bf-8043-3c713a209e1e\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.295675 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key\") pod \"23730b76-30ac-47bf-8043-3c713a209e1e\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.295745 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory\") pod \"23730b76-30ac-47bf-8043-3c713a209e1e\" (UID: \"23730b76-30ac-47bf-8043-3c713a209e1e\") " Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.301935 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "23730b76-30ac-47bf-8043-3c713a209e1e" (UID: "23730b76-30ac-47bf-8043-3c713a209e1e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.303020 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r" (OuterVolumeSpecName: "kube-api-access-cdf9r") pod "23730b76-30ac-47bf-8043-3c713a209e1e" (UID: "23730b76-30ac-47bf-8043-3c713a209e1e"). InnerVolumeSpecName "kube-api-access-cdf9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.327970 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory" (OuterVolumeSpecName: "inventory") pod "23730b76-30ac-47bf-8043-3c713a209e1e" (UID: "23730b76-30ac-47bf-8043-3c713a209e1e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.352999 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23730b76-30ac-47bf-8043-3c713a209e1e" (UID: "23730b76-30ac-47bf-8043-3c713a209e1e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.397845 4875 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.397921 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdf9r\" (UniqueName: \"kubernetes.io/projected/23730b76-30ac-47bf-8043-3c713a209e1e-kube-api-access-cdf9r\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.397938 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.397958 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23730b76-30ac-47bf-8043-3c713a209e1e-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.756371 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" event={"ID":"23730b76-30ac-47bf-8043-3c713a209e1e","Type":"ContainerDied","Data":"947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf"} Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.756418 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="947dc752dda99e5d191682645bade5a0eb88da296031fa351a86110c4e859ecf" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.756453 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.880064 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk"] Oct 07 08:18:16 crc kubenswrapper[4875]: E1007 08:18:16.880535 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23730b76-30ac-47bf-8043-3c713a209e1e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.880558 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="23730b76-30ac-47bf-8043-3c713a209e1e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.880791 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="23730b76-30ac-47bf-8043-3c713a209e1e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.881484 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.883438 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.883561 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.885236 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.885291 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:18:16 crc kubenswrapper[4875]: I1007 08:18:16.900736 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk"] Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.013355 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.013409 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jzzj\" (UniqueName: \"kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.013500 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.117354 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.117463 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jzzj\" (UniqueName: \"kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.117690 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.122729 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.122802 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.146052 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jzzj\" (UniqueName: \"kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-mztgk\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.202317 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.758720 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk"] Oct 07 08:18:17 crc kubenswrapper[4875]: W1007 08:18:17.768144 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19285527_95a7_43c0_9366_3d8895c09835.slice/crio-3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420 WatchSource:0}: Error finding container 3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420: Status 404 returned error can't find the container with id 3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420 Oct 07 08:18:17 crc kubenswrapper[4875]: I1007 08:18:17.822551 4875 scope.go:117] "RemoveContainer" containerID="5f71368c43c156d25b2ce09ab20d0658f6aa74db195a97c64816049fe1afdd14" Oct 07 08:18:18 crc kubenswrapper[4875]: I1007 08:18:18.782651 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" event={"ID":"19285527-95a7-43c0-9366-3d8895c09835","Type":"ContainerStarted","Data":"f5a4faa2465b8d935e0532e581537f6642842a1f0c069a8298579a959e05482f"} Oct 07 08:18:18 crc kubenswrapper[4875]: I1007 08:18:18.783127 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" event={"ID":"19285527-95a7-43c0-9366-3d8895c09835","Type":"ContainerStarted","Data":"3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420"} Oct 07 08:18:18 crc kubenswrapper[4875]: I1007 08:18:18.800698 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" podStartSLOduration=2.361866854 podStartE2EDuration="2.800675666s" podCreationTimestamp="2025-10-07 08:18:16 +0000 UTC" firstStartedPulling="2025-10-07 08:18:17.771270777 +0000 UTC m=+1322.731041320" lastFinishedPulling="2025-10-07 08:18:18.210079589 +0000 UTC 
m=+1323.169850132" observedRunningTime="2025-10-07 08:18:18.797508605 +0000 UTC m=+1323.757279148" watchObservedRunningTime="2025-10-07 08:18:18.800675666 +0000 UTC m=+1323.760446219" Oct 07 08:18:21 crc kubenswrapper[4875]: I1007 08:18:21.809562 4875 generic.go:334] "Generic (PLEG): container finished" podID="19285527-95a7-43c0-9366-3d8895c09835" containerID="f5a4faa2465b8d935e0532e581537f6642842a1f0c069a8298579a959e05482f" exitCode=0 Oct 07 08:18:21 crc kubenswrapper[4875]: I1007 08:18:21.809657 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" event={"ID":"19285527-95a7-43c0-9366-3d8895c09835","Type":"ContainerDied","Data":"f5a4faa2465b8d935e0532e581537f6642842a1f0c069a8298579a959e05482f"} Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.238300 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.347078 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory\") pod \"19285527-95a7-43c0-9366-3d8895c09835\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.347300 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jzzj\" (UniqueName: \"kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj\") pod \"19285527-95a7-43c0-9366-3d8895c09835\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.347449 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key\") pod \"19285527-95a7-43c0-9366-3d8895c09835\" (UID: \"19285527-95a7-43c0-9366-3d8895c09835\") " Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.353928 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj" (OuterVolumeSpecName: "kube-api-access-5jzzj") pod "19285527-95a7-43c0-9366-3d8895c09835" (UID: "19285527-95a7-43c0-9366-3d8895c09835"). InnerVolumeSpecName "kube-api-access-5jzzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.378687 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory" (OuterVolumeSpecName: "inventory") pod "19285527-95a7-43c0-9366-3d8895c09835" (UID: "19285527-95a7-43c0-9366-3d8895c09835"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.378726 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "19285527-95a7-43c0-9366-3d8895c09835" (UID: "19285527-95a7-43c0-9366-3d8895c09835"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.449753 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.449806 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jzzj\" (UniqueName: \"kubernetes.io/projected/19285527-95a7-43c0-9366-3d8895c09835-kube-api-access-5jzzj\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.449822 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19285527-95a7-43c0-9366-3d8895c09835-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.829315 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" event={"ID":"19285527-95a7-43c0-9366-3d8895c09835","Type":"ContainerDied","Data":"3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420"} Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.829353 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-mztgk" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.829365 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ace2690a3e2bdb434f2052dcf5562f38739737db1fd4976273666700d61d420" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.899832 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4"] Oct 07 08:18:23 crc kubenswrapper[4875]: E1007 08:18:23.900305 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19285527-95a7-43c0-9366-3d8895c09835" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.900321 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="19285527-95a7-43c0-9366-3d8895c09835" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.900529 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="19285527-95a7-43c0-9366-3d8895c09835" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.901239 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.905043 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.905679 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.906362 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.955657 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.958264 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4"] Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.977032 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tz88\" (UniqueName: \"kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.977229 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.977658 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:23 crc kubenswrapper[4875]: I1007 08:18:23.978226 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.080425 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.080489 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tz88\" (UniqueName: \"kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.080532 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.080582 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.085647 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.086406 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.086968 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.100771 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tz88\" (UniqueName: \"kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.270104 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.814127 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4"] Oct 07 08:18:24 crc kubenswrapper[4875]: I1007 08:18:24.846567 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" event={"ID":"15ba8b2a-ed31-47c5-b655-efb44ceb0134","Type":"ContainerStarted","Data":"36d2f1311d81d484b1e20d7adedbaab5245ccec733c30115c3c9ca366bc9496d"} Oct 07 08:18:25 crc kubenswrapper[4875]: I1007 08:18:25.856720 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" event={"ID":"15ba8b2a-ed31-47c5-b655-efb44ceb0134","Type":"ContainerStarted","Data":"cd956fcbf87f8ea46040ff0d1ac3e084075ecd1eb325be550e86eefb89d25581"} Oct 07 08:18:25 crc kubenswrapper[4875]: I1007 08:18:25.879690 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" podStartSLOduration=2.153479912 podStartE2EDuration="2.87966873s" podCreationTimestamp="2025-10-07 08:18:23 +0000 UTC" firstStartedPulling="2025-10-07 08:18:24.823796179 +0000 UTC m=+1329.783566722" lastFinishedPulling="2025-10-07 08:18:25.549985007 +0000 UTC m=+1330.509755540" observedRunningTime="2025-10-07 08:18:25.87308002 +0000 UTC m=+1330.832850623" watchObservedRunningTime="2025-10-07 08:18:25.87966873 +0000 UTC m=+1330.839439263" Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.221513 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.223078 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.223172 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.223999 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.224072 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47" gracePeriod=600 Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.919468 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" 
containerID="55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47" exitCode=0 Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.919566 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47"} Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.920467 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef"} Oct 07 08:18:31 crc kubenswrapper[4875]: I1007 08:18:31.920500 4875 scope.go:117] "RemoveContainer" containerID="933ac15074ad9aef219ca9dd266d407f01a604b86a5e807215e08ee271925427" Oct 07 08:19:17 crc kubenswrapper[4875]: I1007 08:19:17.895574 4875 scope.go:117] "RemoveContainer" containerID="2c6b6ca2b005ca9e1551328abf58d560788b5aba40a402db21a8ae3a2c65da1e" Oct 07 08:19:17 crc kubenswrapper[4875]: I1007 08:19:17.948469 4875 scope.go:117] "RemoveContainer" containerID="6bd552c8dd28ca6b0e5a1af9046d0827447aba7150dd3cedd129f4d1583e8b0e" Oct 07 08:19:18 crc kubenswrapper[4875]: I1007 08:19:18.014988 4875 scope.go:117] "RemoveContainer" containerID="541c994fd394d6919cd0ba24e91df28102524f45378c6b8fac295753e60b09ba" Oct 07 08:20:18 crc kubenswrapper[4875]: I1007 08:20:18.123752 4875 scope.go:117] "RemoveContainer" containerID="81d856a9695463b7906f8eb2f780ac834435136db46dcf26d9332e4ae59c40ac" Oct 07 08:20:31 crc kubenswrapper[4875]: I1007 08:20:31.221428 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:20:31 crc kubenswrapper[4875]: I1007 08:20:31.222108 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:20:46 crc kubenswrapper[4875]: I1007 08:20:46.932343 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:20:46 crc kubenswrapper[4875]: I1007 08:20:46.939047 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:46 crc kubenswrapper[4875]: I1007 08:20:46.954462 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.055421 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fngzb\" (UniqueName: \"kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.056358 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.056394 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.158842 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.158891 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.158956 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fngzb\" (UniqueName: \"kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.159523 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.159639 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.189019 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fngzb\" (UniqueName: \"kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb\") pod \"certified-operators-m5kjk\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.266675 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:47 crc kubenswrapper[4875]: I1007 08:20:47.871351 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:20:48 crc kubenswrapper[4875]: I1007 08:20:48.343768 4875 generic.go:334] "Generic (PLEG): container finished" podID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerID="0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20" exitCode=0 Oct 07 08:20:48 crc kubenswrapper[4875]: I1007 08:20:48.343846 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerDied","Data":"0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20"} Oct 07 08:20:48 crc kubenswrapper[4875]: I1007 08:20:48.344116 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerStarted","Data":"ee6b7696bc9fc1847d2c4df47c82af18dbf386c1d1c51b6e41ee38c442e703da"} Oct 07 08:20:50 crc kubenswrapper[4875]: I1007 08:20:50.374717 4875 generic.go:334] "Generic (PLEG): container finished" podID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerID="51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650" exitCode=0 Oct 07 08:20:50 crc kubenswrapper[4875]: I1007 08:20:50.374836 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerDied","Data":"51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650"} Oct 07 08:20:51 crc kubenswrapper[4875]: I1007 08:20:51.402708 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerStarted","Data":"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece"} Oct 07 08:20:51 crc kubenswrapper[4875]: I1007 08:20:51.427572 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m5kjk" podStartSLOduration=2.979526014 podStartE2EDuration="5.427549192s" podCreationTimestamp="2025-10-07 08:20:46 +0000 UTC" firstStartedPulling="2025-10-07 08:20:48.347938511 +0000 UTC m=+1473.307709054" lastFinishedPulling="2025-10-07 08:20:50.795961679 +0000 UTC m=+1475.755732232" observedRunningTime="2025-10-07 08:20:51.425331731 +0000 UTC m=+1476.385102294" watchObservedRunningTime="2025-10-07 08:20:51.427549192 +0000 UTC m=+1476.387319735" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.230433 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.232604 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.248672 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.327993 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.328085 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.328172 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zc5p\" (UniqueName: \"kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.430690 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zc5p\" (UniqueName: \"kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.431346 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.431434 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.432023 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.432177 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.457036 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6zc5p\" (UniqueName: \"kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p\") pod \"community-operators-27ngk\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:54 crc kubenswrapper[4875]: I1007 08:20:54.582357 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:20:55 crc kubenswrapper[4875]: I1007 08:20:55.180716 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:20:55 crc kubenswrapper[4875]: I1007 08:20:55.452981 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerStarted","Data":"34ab87d2170f2c4cc44efff60c098dfe917f6e94be293bf9d9a1864a45ab5d71"} Oct 07 08:20:56 crc kubenswrapper[4875]: I1007 08:20:56.468574 4875 generic.go:334] "Generic (PLEG): container finished" podID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerID="6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4" exitCode=0 Oct 07 08:20:56 crc kubenswrapper[4875]: I1007 08:20:56.468696 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerDied","Data":"6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4"} Oct 07 08:20:57 crc kubenswrapper[4875]: I1007 08:20:57.267368 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:57 crc kubenswrapper[4875]: I1007 08:20:57.267430 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:57 crc kubenswrapper[4875]: I1007 08:20:57.348115 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:57 crc kubenswrapper[4875]: I1007 08:20:57.485204 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerStarted","Data":"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690"} Oct 07 08:20:57 crc kubenswrapper[4875]: I1007 08:20:57.561563 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:20:58 crc kubenswrapper[4875]: I1007 08:20:58.504676 4875 generic.go:334] "Generic (PLEG): container finished" podID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerID="36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690" exitCode=0 Oct 07 08:20:58 crc kubenswrapper[4875]: I1007 08:20:58.504802 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerDied","Data":"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690"} Oct 07 08:20:59 crc kubenswrapper[4875]: I1007 08:20:59.537546 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" 
event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerStarted","Data":"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399"} Oct 07 08:20:59 crc kubenswrapper[4875]: I1007 08:20:59.565315 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-27ngk" podStartSLOduration=3.052227104 podStartE2EDuration="5.565288314s" podCreationTimestamp="2025-10-07 08:20:54 +0000 UTC" firstStartedPulling="2025-10-07 08:20:56.471793091 +0000 UTC m=+1481.431563634" lastFinishedPulling="2025-10-07 08:20:58.984854301 +0000 UTC m=+1483.944624844" observedRunningTime="2025-10-07 08:20:59.561378019 +0000 UTC m=+1484.521148562" watchObservedRunningTime="2025-10-07 08:20:59.565288314 +0000 UTC m=+1484.525058857" Oct 07 08:20:59 crc kubenswrapper[4875]: I1007 08:20:59.599190 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:20:59 crc kubenswrapper[4875]: I1007 08:20:59.599598 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m5kjk" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="registry-server" containerID="cri-o://9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece" gracePeriod=2 Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.089280 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.188078 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities\") pod \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.188283 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fngzb\" (UniqueName: \"kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb\") pod \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.188463 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content\") pod \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\" (UID: \"d82c3777-3f4b-4649-9f74-0729afb5fbc9\") " Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.188999 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities" (OuterVolumeSpecName: "utilities") pod "d82c3777-3f4b-4649-9f74-0729afb5fbc9" (UID: "d82c3777-3f4b-4649-9f74-0729afb5fbc9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.197231 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb" (OuterVolumeSpecName: "kube-api-access-fngzb") pod "d82c3777-3f4b-4649-9f74-0729afb5fbc9" (UID: "d82c3777-3f4b-4649-9f74-0729afb5fbc9"). InnerVolumeSpecName "kube-api-access-fngzb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.239798 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d82c3777-3f4b-4649-9f74-0729afb5fbc9" (UID: "d82c3777-3f4b-4649-9f74-0729afb5fbc9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.291499 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.291551 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82c3777-3f4b-4649-9f74-0729afb5fbc9-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.291566 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fngzb\" (UniqueName: \"kubernetes.io/projected/d82c3777-3f4b-4649-9f74-0729afb5fbc9-kube-api-access-fngzb\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.550691 4875 generic.go:334] "Generic (PLEG): container finished" podID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerID="9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece" exitCode=0 Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.550734 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerDied","Data":"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece"} Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.550789 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5kjk" event={"ID":"d82c3777-3f4b-4649-9f74-0729afb5fbc9","Type":"ContainerDied","Data":"ee6b7696bc9fc1847d2c4df47c82af18dbf386c1d1c51b6e41ee38c442e703da"} Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.550807 4875 scope.go:117] "RemoveContainer" containerID="9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.551947 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m5kjk" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.571042 4875 scope.go:117] "RemoveContainer" containerID="51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.590548 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.599420 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m5kjk"] Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.613245 4875 scope.go:117] "RemoveContainer" containerID="0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.651903 4875 scope.go:117] "RemoveContainer" containerID="9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece" Oct 07 08:21:00 crc kubenswrapper[4875]: E1007 08:21:00.652426 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece\": container with ID starting with 9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece not found: ID does not exist" containerID="9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.652481 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece"} err="failed to get container status \"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece\": rpc error: code = NotFound desc = could not find container \"9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece\": container with ID starting with 9ad1c18ea7a9d516a141aa694f5578561dd1c1139b47bbab531ef17d33540ece not found: ID does not exist" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.652518 4875 scope.go:117] "RemoveContainer" containerID="51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650" Oct 07 08:21:00 crc kubenswrapper[4875]: E1007 08:21:00.652942 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650\": container with ID starting with 51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650 not found: ID does not exist" containerID="51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.653072 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650"} err="failed to get container status \"51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650\": rpc error: code = NotFound desc = could not find container \"51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650\": container with ID starting with 51a18688e465d92e20f882e1ba8be7a83668a4490f60d01a40845db1079e0650 not found: ID does not exist" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.653271 4875 scope.go:117] "RemoveContainer" containerID="0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20" Oct 07 08:21:00 crc kubenswrapper[4875]: E1007 08:21:00.653727 4875 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20\": container with ID starting with 0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20 not found: ID does not exist" containerID="0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20" Oct 07 08:21:00 crc kubenswrapper[4875]: I1007 08:21:00.653796 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20"} err="failed to get container status \"0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20\": rpc error: code = NotFound desc = could not find container \"0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20\": container with ID starting with 0af252ded9ca855f5d158a6749b7b8af3fb893d33ff95c2866eb46e6cc003c20 not found: ID does not exist" Oct 07 08:21:01 crc kubenswrapper[4875]: I1007 08:21:01.221079 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:21:01 crc kubenswrapper[4875]: I1007 08:21:01.221150 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:21:01 crc kubenswrapper[4875]: I1007 08:21:01.714417 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" path="/var/lib/kubelet/pods/d82c3777-3f4b-4649-9f74-0729afb5fbc9/volumes" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.804846 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:03 crc kubenswrapper[4875]: E1007 08:21:03.805782 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="registry-server" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.805801 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="registry-server" Oct 07 08:21:03 crc kubenswrapper[4875]: E1007 08:21:03.805826 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="extract-content" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.805834 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="extract-content" Oct 07 08:21:03 crc kubenswrapper[4875]: E1007 08:21:03.805853 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="extract-utilities" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.805861 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="extract-utilities" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.806092 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82c3777-3f4b-4649-9f74-0729afb5fbc9" containerName="registry-server" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 
08:21:03.808606 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.823450 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.978622 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.978712 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zng29\" (UniqueName: \"kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:03 crc kubenswrapper[4875]: I1007 08:21:03.978774 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.080235 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.080309 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zng29\" (UniqueName: \"kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.080337 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.080820 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.080845 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 
08:21:04.101000 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zng29\" (UniqueName: \"kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29\") pod \"redhat-marketplace-6jvqw\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.166100 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.583204 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.583286 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.635482 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.687046 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:04 crc kubenswrapper[4875]: I1007 08:21:04.700439 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:05 crc kubenswrapper[4875]: I1007 08:21:05.629990 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerID="81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b" exitCode=0 Oct 07 08:21:05 crc kubenswrapper[4875]: I1007 08:21:05.630121 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerDied","Data":"81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b"} Oct 07 08:21:05 crc kubenswrapper[4875]: I1007 08:21:05.630578 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerStarted","Data":"468198b602e461d6247de525fed4cb439db35654152e3c7e2cd05194f0864792"} Oct 07 08:21:06 crc kubenswrapper[4875]: I1007 08:21:06.659170 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerID="dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123" exitCode=0 Oct 07 08:21:06 crc kubenswrapper[4875]: I1007 08:21:06.659584 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerDied","Data":"dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123"} Oct 07 08:21:07 crc kubenswrapper[4875]: I1007 08:21:07.672575 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerStarted","Data":"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172"} Oct 07 08:21:07 crc kubenswrapper[4875]: I1007 08:21:07.691850 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6jvqw" podStartSLOduration=3.173430256 podStartE2EDuration="4.691828768s" 
podCreationTimestamp="2025-10-07 08:21:03 +0000 UTC" firstStartedPulling="2025-10-07 08:21:05.631711567 +0000 UTC m=+1490.591482150" lastFinishedPulling="2025-10-07 08:21:07.150110099 +0000 UTC m=+1492.109880662" observedRunningTime="2025-10-07 08:21:07.689211645 +0000 UTC m=+1492.648982198" watchObservedRunningTime="2025-10-07 08:21:07.691828768 +0000 UTC m=+1492.651599321" Oct 07 08:21:07 crc kubenswrapper[4875]: I1007 08:21:07.999025 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:21:07 crc kubenswrapper[4875]: I1007 08:21:07.999308 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-27ngk" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="registry-server" containerID="cri-o://ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399" gracePeriod=2 Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.448156 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.572867 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zc5p\" (UniqueName: \"kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p\") pod \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.573853 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities\") pod \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.574419 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content\") pod \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\" (UID: \"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b\") " Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.574822 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities" (OuterVolumeSpecName: "utilities") pod "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" (UID: "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.577084 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.585235 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p" (OuterVolumeSpecName: "kube-api-access-6zc5p") pod "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" (UID: "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b"). InnerVolumeSpecName "kube-api-access-6zc5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.642260 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" (UID: "1b15bb23-8d95-4bd6-aae0-9c0220bcc05b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.680018 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zc5p\" (UniqueName: \"kubernetes.io/projected/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-kube-api-access-6zc5p\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.680052 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.687785 4875 generic.go:334] "Generic (PLEG): container finished" podID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerID="ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399" exitCode=0 Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.687846 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-27ngk" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.687859 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerDied","Data":"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399"} Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.687961 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-27ngk" event={"ID":"1b15bb23-8d95-4bd6-aae0-9c0220bcc05b","Type":"ContainerDied","Data":"34ab87d2170f2c4cc44efff60c098dfe917f6e94be293bf9d9a1864a45ab5d71"} Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.687980 4875 scope.go:117] "RemoveContainer" containerID="ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.723403 4875 scope.go:117] "RemoveContainer" containerID="36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.727821 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.738418 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-27ngk"] Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.747255 4875 scope.go:117] "RemoveContainer" containerID="6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.791181 4875 scope.go:117] "RemoveContainer" containerID="ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399" Oct 07 08:21:08 crc kubenswrapper[4875]: E1007 08:21:08.791690 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399\": container with ID starting with 
ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399 not found: ID does not exist" containerID="ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.791722 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399"} err="failed to get container status \"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399\": rpc error: code = NotFound desc = could not find container \"ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399\": container with ID starting with ae406dd38a834da556a799b58513720563d403738f2233e841277526cc09c399 not found: ID does not exist" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.791743 4875 scope.go:117] "RemoveContainer" containerID="36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690" Oct 07 08:21:08 crc kubenswrapper[4875]: E1007 08:21:08.792156 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690\": container with ID starting with 36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690 not found: ID does not exist" containerID="36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.792176 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690"} err="failed to get container status \"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690\": rpc error: code = NotFound desc = could not find container \"36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690\": container with ID starting with 36bd4c025746e649b450103d7ffc83d289f10676cac358e06977741b5ff08690 not found: ID does not exist" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.792189 4875 scope.go:117] "RemoveContainer" containerID="6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4" Oct 07 08:21:08 crc kubenswrapper[4875]: E1007 08:21:08.792511 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4\": container with ID starting with 6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4 not found: ID does not exist" containerID="6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4" Oct 07 08:21:08 crc kubenswrapper[4875]: I1007 08:21:08.792533 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4"} err="failed to get container status \"6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4\": rpc error: code = NotFound desc = could not find container \"6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4\": container with ID starting with 6668a1d7eaf54d379753a2f0469ea0d26675ad9c817b1932dcb13267f19cadf4 not found: ID does not exist" Oct 07 08:21:09 crc kubenswrapper[4875]: I1007 08:21:09.716091 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" path="/var/lib/kubelet/pods/1b15bb23-8d95-4bd6-aae0-9c0220bcc05b/volumes" Oct 07 08:21:14 crc kubenswrapper[4875]: I1007 08:21:14.166279 
4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:14 crc kubenswrapper[4875]: I1007 08:21:14.166826 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:14 crc kubenswrapper[4875]: I1007 08:21:14.218206 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:14 crc kubenswrapper[4875]: I1007 08:21:14.798231 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:14 crc kubenswrapper[4875]: I1007 08:21:14.852406 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:16 crc kubenswrapper[4875]: I1007 08:21:16.780760 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6jvqw" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="registry-server" containerID="cri-o://5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172" gracePeriod=2 Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.230323 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.346406 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content\") pod \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.346969 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities\") pod \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.347133 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zng29\" (UniqueName: \"kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29\") pod \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\" (UID: \"5a8be590-25cd-4d86-b2df-eb5906b3ba2e\") " Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.348191 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities" (OuterVolumeSpecName: "utilities") pod "5a8be590-25cd-4d86-b2df-eb5906b3ba2e" (UID: "5a8be590-25cd-4d86-b2df-eb5906b3ba2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.356893 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29" (OuterVolumeSpecName: "kube-api-access-zng29") pod "5a8be590-25cd-4d86-b2df-eb5906b3ba2e" (UID: "5a8be590-25cd-4d86-b2df-eb5906b3ba2e"). InnerVolumeSpecName "kube-api-access-zng29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.366119 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a8be590-25cd-4d86-b2df-eb5906b3ba2e" (UID: "5a8be590-25cd-4d86-b2df-eb5906b3ba2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.449710 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.449746 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zng29\" (UniqueName: \"kubernetes.io/projected/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-kube-api-access-zng29\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.449756 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a8be590-25cd-4d86-b2df-eb5906b3ba2e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.792014 4875 generic.go:334] "Generic (PLEG): container finished" podID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerID="5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172" exitCode=0 Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.792066 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerDied","Data":"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172"} Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.792087 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jvqw" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.792109 4875 scope.go:117] "RemoveContainer" containerID="5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.792097 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jvqw" event={"ID":"5a8be590-25cd-4d86-b2df-eb5906b3ba2e","Type":"ContainerDied","Data":"468198b602e461d6247de525fed4cb439db35654152e3c7e2cd05194f0864792"} Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.814390 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.818072 4875 scope.go:117] "RemoveContainer" containerID="dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.821973 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jvqw"] Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.846005 4875 scope.go:117] "RemoveContainer" containerID="81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.888739 4875 scope.go:117] "RemoveContainer" containerID="5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172" Oct 07 08:21:17 crc kubenswrapper[4875]: E1007 08:21:17.893035 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172\": container with ID starting with 5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172 not found: ID does not exist" containerID="5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.893076 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172"} err="failed to get container status \"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172\": rpc error: code = NotFound desc = could not find container \"5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172\": container with ID starting with 5e912bdc4d1b147bf8cf1f2c853497db325dc820e1fd9599ca503a17af485172 not found: ID does not exist" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.893103 4875 scope.go:117] "RemoveContainer" containerID="dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123" Oct 07 08:21:17 crc kubenswrapper[4875]: E1007 08:21:17.893518 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123\": container with ID starting with dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123 not found: ID does not exist" containerID="dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.893557 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123"} err="failed to get container status \"dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123\": rpc error: code = NotFound desc = could not find 
container \"dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123\": container with ID starting with dd7cf378e12b31f573d06347f62a33403e4c7ee17f9084bb5d49147073da2123 not found: ID does not exist" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.893584 4875 scope.go:117] "RemoveContainer" containerID="81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b" Oct 07 08:21:17 crc kubenswrapper[4875]: E1007 08:21:17.893997 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b\": container with ID starting with 81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b not found: ID does not exist" containerID="81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b" Oct 07 08:21:17 crc kubenswrapper[4875]: I1007 08:21:17.894033 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b"} err="failed to get container status \"81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b\": rpc error: code = NotFound desc = could not find container \"81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b\": container with ID starting with 81b0c7b667bcd837eb0dfb73cc81182fe8247075fffb13670ad2990a8baf2a2b not found: ID does not exist" Oct 07 08:21:18 crc kubenswrapper[4875]: I1007 08:21:18.210424 4875 scope.go:117] "RemoveContainer" containerID="dd78c9155d1b48b3d3ebbfc6e7bbcdc623c33202681dc7ceb4d56d158ee1623d" Oct 07 08:21:18 crc kubenswrapper[4875]: I1007 08:21:18.230441 4875 scope.go:117] "RemoveContainer" containerID="63e938d159ea8e0c387d1593fc50a516bccb3e704f9694e1e8a2ba237ecf73d1" Oct 07 08:21:18 crc kubenswrapper[4875]: I1007 08:21:18.247636 4875 scope.go:117] "RemoveContainer" containerID="76b93e0b875b2d2d0f74dc9a6fa5c4aac8377a5347a1cc8996cb5420e6472bd4" Oct 07 08:21:18 crc kubenswrapper[4875]: I1007 08:21:18.268782 4875 scope.go:117] "RemoveContainer" containerID="ce9aaedc6273f3fa2665700ce33115fb2ef8d04b2a1b9f22bddacd8117ea1c89" Oct 07 08:21:19 crc kubenswrapper[4875]: I1007 08:21:19.707548 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" path="/var/lib/kubelet/pods/5a8be590-25cd-4d86-b2df-eb5906b3ba2e/volumes" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.220819 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.221283 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.221329 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.222127 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.222171 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" gracePeriod=600 Oct 07 08:21:31 crc kubenswrapper[4875]: E1007 08:21:31.347457 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.921227 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" exitCode=0 Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.921274 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef"} Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.921307 4875 scope.go:117] "RemoveContainer" containerID="55efb217a04e70bc6e7faba22ad468c623c75c0fd9c6f3ce56027a9559dc9a47" Oct 07 08:21:31 crc kubenswrapper[4875]: I1007 08:21:31.922042 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:21:31 crc kubenswrapper[4875]: E1007 08:21:31.922292 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:21:32 crc kubenswrapper[4875]: I1007 08:21:32.933969 4875 generic.go:334] "Generic (PLEG): container finished" podID="15ba8b2a-ed31-47c5-b655-efb44ceb0134" containerID="cd956fcbf87f8ea46040ff0d1ac3e084075ecd1eb325be550e86eefb89d25581" exitCode=0 Oct 07 08:21:32 crc kubenswrapper[4875]: I1007 08:21:32.934009 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" event={"ID":"15ba8b2a-ed31-47c5-b655-efb44ceb0134","Type":"ContainerDied","Data":"cd956fcbf87f8ea46040ff0d1ac3e084075ecd1eb325be550e86eefb89d25581"} Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.353236 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.475111 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle\") pod \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.475193 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tz88\" (UniqueName: \"kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88\") pod \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.475222 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key\") pod \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.475285 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory\") pod \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\" (UID: \"15ba8b2a-ed31-47c5-b655-efb44ceb0134\") " Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.485021 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88" (OuterVolumeSpecName: "kube-api-access-4tz88") pod "15ba8b2a-ed31-47c5-b655-efb44ceb0134" (UID: "15ba8b2a-ed31-47c5-b655-efb44ceb0134"). InnerVolumeSpecName "kube-api-access-4tz88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.488250 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "15ba8b2a-ed31-47c5-b655-efb44ceb0134" (UID: "15ba8b2a-ed31-47c5-b655-efb44ceb0134"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.505612 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15ba8b2a-ed31-47c5-b655-efb44ceb0134" (UID: "15ba8b2a-ed31-47c5-b655-efb44ceb0134"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.523257 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory" (OuterVolumeSpecName: "inventory") pod "15ba8b2a-ed31-47c5-b655-efb44ceb0134" (UID: "15ba8b2a-ed31-47c5-b655-efb44ceb0134"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.577476 4875 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.577507 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tz88\" (UniqueName: \"kubernetes.io/projected/15ba8b2a-ed31-47c5-b655-efb44ceb0134-kube-api-access-4tz88\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.577529 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.577541 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15ba8b2a-ed31-47c5-b655-efb44ceb0134-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.953176 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" event={"ID":"15ba8b2a-ed31-47c5-b655-efb44ceb0134","Type":"ContainerDied","Data":"36d2f1311d81d484b1e20d7adedbaab5245ccec733c30115c3c9ca366bc9496d"} Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.953228 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36d2f1311d81d484b1e20d7adedbaab5245ccec733c30115c3c9ca366bc9496d" Oct 07 08:21:34 crc kubenswrapper[4875]: I1007 08:21:34.953247 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.040964 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w"] Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041334 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="extract-content" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041350 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="extract-content" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041364 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="extract-utilities" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041370 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="extract-utilities" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041380 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ba8b2a-ed31-47c5-b655-efb44ceb0134" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041388 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ba8b2a-ed31-47c5-b655-efb44ceb0134" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041406 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041413 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041430 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041436 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041448 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="extract-content" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041453 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" containerName="extract-content" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.041465 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="extract-utilities" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041471 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="extract-utilities" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041664 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ba8b2a-ed31-47c5-b655-efb44ceb0134" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041686 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a8be590-25cd-4d86-b2df-eb5906b3ba2e" 
containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.041696 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b15bb23-8d95-4bd6-aae0-9c0220bcc05b" containerName="registry-server" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.042343 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.044650 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.044815 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.044946 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.045060 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:21:35 crc kubenswrapper[4875]: E1007 08:21:35.051853 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15ba8b2a_ed31_47c5_b655_efb44ceb0134.slice/crio-36d2f1311d81d484b1e20d7adedbaab5245ccec733c30115c3c9ca366bc9496d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15ba8b2a_ed31_47c5_b655_efb44ceb0134.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.054520 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w"] Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.187125 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g958r\" (UniqueName: \"kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.187441 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.187465 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.289341 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory\") 
pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.289480 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g958r\" (UniqueName: \"kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.289581 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.294272 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.295141 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.311727 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g958r\" (UniqueName: \"kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-f424w\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.367790 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.891948 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w"] Oct 07 08:21:35 crc kubenswrapper[4875]: I1007 08:21:35.964453 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" event={"ID":"7b638466-3fdb-4290-8a73-9f3d018a8ee0","Type":"ContainerStarted","Data":"bd72f192477bf909dfd45918abf6c11b26cfc09732fde4c42ab712d432dcecda"} Oct 07 08:21:36 crc kubenswrapper[4875]: I1007 08:21:36.976064 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" event={"ID":"7b638466-3fdb-4290-8a73-9f3d018a8ee0","Type":"ContainerStarted","Data":"104194db3586cde99184876ce012a4e4afe4367c905f6c3188eb02f7170b7a66"} Oct 07 08:21:37 crc kubenswrapper[4875]: I1007 08:21:37.004180 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" podStartSLOduration=1.441139387 podStartE2EDuration="2.004156874s" podCreationTimestamp="2025-10-07 08:21:35 +0000 UTC" firstStartedPulling="2025-10-07 08:21:35.899426688 +0000 UTC m=+1520.859197231" lastFinishedPulling="2025-10-07 08:21:36.462444175 +0000 UTC m=+1521.422214718" observedRunningTime="2025-10-07 08:21:36.994735233 +0000 UTC m=+1521.954505806" watchObservedRunningTime="2025-10-07 08:21:37.004156874 +0000 UTC m=+1521.963927417" Oct 07 08:21:44 crc kubenswrapper[4875]: I1007 08:21:44.697641 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:21:44 crc kubenswrapper[4875]: E1007 08:21:44.698516 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:21:59 crc kubenswrapper[4875]: I1007 08:21:59.697504 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:21:59 crc kubenswrapper[4875]: E1007 08:21:59.698836 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:22:14 crc kubenswrapper[4875]: I1007 08:22:14.697007 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:22:14 crc kubenswrapper[4875]: E1007 08:22:14.698083 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:22:18 crc kubenswrapper[4875]: I1007 08:22:18.390579 4875 scope.go:117] "RemoveContainer" containerID="ef5c55196f486fcaf605c9a56b822a4250f48fd2dec2e0d1ca942077e9642376" Oct 07 08:22:18 crc kubenswrapper[4875]: I1007 08:22:18.421440 4875 scope.go:117] "RemoveContainer" containerID="845f5361463692e00f406a302ba10885ef0daf84aac41c5c3b75abfd79f11b5c" Oct 07 08:22:18 crc kubenswrapper[4875]: I1007 08:22:18.453998 4875 scope.go:117] "RemoveContainer" containerID="25acfa43f917e4a4c187cae67cea596c56cfcd3e883b78c57c3f7215f897f29f" Oct 07 08:22:18 crc kubenswrapper[4875]: I1007 08:22:18.484799 4875 scope.go:117] "RemoveContainer" containerID="6ae498c80e5cad274e16b64a9b4db5680cd54a5995ac3d55d7d3f1fdd57a3b15" Oct 07 08:22:24 crc kubenswrapper[4875]: I1007 08:22:24.044464 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-5dzb8"] Oct 07 08:22:24 crc kubenswrapper[4875]: I1007 08:22:24.056601 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-zh4xd"] Oct 07 08:22:24 crc kubenswrapper[4875]: I1007 08:22:24.064703 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-5dzb8"] Oct 07 08:22:24 crc kubenswrapper[4875]: I1007 08:22:24.072596 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-zh4xd"] Oct 07 08:22:25 crc kubenswrapper[4875]: I1007 08:22:25.714380 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fd749a4-1003-4a72-b637-98af9a1ff9f3" path="/var/lib/kubelet/pods/4fd749a4-1003-4a72-b637-98af9a1ff9f3/volumes" Oct 07 08:22:25 crc kubenswrapper[4875]: I1007 08:22:25.715312 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5431993-98ef-4f56-ba68-210402d2be39" path="/var/lib/kubelet/pods/e5431993-98ef-4f56-ba68-210402d2be39/volumes" Oct 07 08:22:27 crc kubenswrapper[4875]: I1007 08:22:27.698158 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:22:27 crc kubenswrapper[4875]: E1007 08:22:27.698733 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:22:32 crc kubenswrapper[4875]: I1007 08:22:32.035142 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-2cw6b"] Oct 07 08:22:32 crc kubenswrapper[4875]: I1007 08:22:32.047954 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-2cw6b"] Oct 07 08:22:33 crc kubenswrapper[4875]: I1007 08:22:33.031136 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8477-account-create-7z2lk"] Oct 07 08:22:33 crc kubenswrapper[4875]: I1007 08:22:33.038270 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-8477-account-create-7z2lk"] Oct 07 08:22:33 crc kubenswrapper[4875]: I1007 08:22:33.713549 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4af0be-71a8-464b-90d9-909fcbe4cf80" path="/var/lib/kubelet/pods/1f4af0be-71a8-464b-90d9-909fcbe4cf80/volumes" Oct 07 
08:22:33 crc kubenswrapper[4875]: I1007 08:22:33.714225 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2a895c5-0206-4d29-b1e8-ef70102ec8bd" path="/var/lib/kubelet/pods/f2a895c5-0206-4d29-b1e8-ef70102ec8bd/volumes" Oct 07 08:22:39 crc kubenswrapper[4875]: I1007 08:22:39.698366 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:22:39 crc kubenswrapper[4875]: E1007 08:22:39.699617 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.058964 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-68b7-account-create-xpwdw"] Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.069757 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9c6a-account-create-xzc6c"] Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.082778 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9c6a-account-create-xzc6c"] Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.094865 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-68b7-account-create-xpwdw"] Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.712367 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c" path="/var/lib/kubelet/pods/cef4a289-8a9d-48de-bbc3-fa5c5a7e2e9c/volumes" Oct 07 08:22:47 crc kubenswrapper[4875]: I1007 08:22:47.713086 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e50c9707-1542-4976-ac6b-97e240bc2a47" path="/var/lib/kubelet/pods/e50c9707-1542-4976-ac6b-97e240bc2a47/volumes" Oct 07 08:22:52 crc kubenswrapper[4875]: I1007 08:22:52.698142 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:22:52 crc kubenswrapper[4875]: E1007 08:22:52.698827 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:22:57 crc kubenswrapper[4875]: I1007 08:22:57.052805 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-frvnr"] Oct 07 08:22:57 crc kubenswrapper[4875]: I1007 08:22:57.068267 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-frvnr"] Oct 07 08:22:57 crc kubenswrapper[4875]: I1007 08:22:57.718094 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d" path="/var/lib/kubelet/pods/831e9d9a-5dbe-4bde-bba5-e5eec53bbe9d/volumes" Oct 07 08:23:07 crc kubenswrapper[4875]: I1007 08:23:07.698096 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:23:07 crc kubenswrapper[4875]: 
E1007 08:23:07.698995 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:23:10 crc kubenswrapper[4875]: I1007 08:23:10.062203 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-ngck8"] Oct 07 08:23:10 crc kubenswrapper[4875]: I1007 08:23:10.073783 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-rf76c"] Oct 07 08:23:10 crc kubenswrapper[4875]: I1007 08:23:10.082227 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-rf76c"] Oct 07 08:23:10 crc kubenswrapper[4875]: I1007 08:23:10.089046 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-ngck8"] Oct 07 08:23:11 crc kubenswrapper[4875]: I1007 08:23:11.055988 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qrfpg"] Oct 07 08:23:11 crc kubenswrapper[4875]: I1007 08:23:11.071330 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qrfpg"] Oct 07 08:23:11 crc kubenswrapper[4875]: I1007 08:23:11.714820 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b44db21-b46a-4439-9bca-c118bf324259" path="/var/lib/kubelet/pods/5b44db21-b46a-4439-9bca-c118bf324259/volumes" Oct 07 08:23:11 crc kubenswrapper[4875]: I1007 08:23:11.717781 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a93ef883-6507-4397-bc94-b1979e4763b1" path="/var/lib/kubelet/pods/a93ef883-6507-4397-bc94-b1979e4763b1/volumes" Oct 07 08:23:11 crc kubenswrapper[4875]: I1007 08:23:11.719407 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da04c041-724c-4f63-a7da-c69cbf663805" path="/var/lib/kubelet/pods/da04c041-724c-4f63-a7da-c69cbf663805/volumes" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.562483 4875 scope.go:117] "RemoveContainer" containerID="e2d6c24a72bbf10bf096552144f309944909c8b979386f7c99e9298a5e8b470f" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.622662 4875 scope.go:117] "RemoveContainer" containerID="9d5172e3eb8ff1d18717a04fd657a3a810ea415aeb50644ee62d5d081a2d2aa9" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.685064 4875 scope.go:117] "RemoveContainer" containerID="317e9e19e5736dc5700c8cacdf2b3086fde9220f18b8c75875f2aba331e5b1b7" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.721812 4875 scope.go:117] "RemoveContainer" containerID="a11ea7ea99e1eebae9e175718d1b4ce6a140230b9daa178b6d6617af6ba118e8" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.768331 4875 scope.go:117] "RemoveContainer" containerID="14fc663b076f60e9ee988ab1ae4dbbf94f78059f1b1a8cf7acb9531ed7a1a634" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.812911 4875 scope.go:117] "RemoveContainer" containerID="02e6c6cc447c817cebb17e57f6cf13e32ab4ff374855f749e17e8ddd521007bc" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.867490 4875 scope.go:117] "RemoveContainer" containerID="535c8ae83c7e60d6cc18ef9d9c98aa0b4e125ca84130b7c621a2defb1f269c5f" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.890651 4875 scope.go:117] "RemoveContainer" 
containerID="5bb5319e164fddf9e9216540479deb2613a098d73ececb4120869f45667e0ac0" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.914658 4875 scope.go:117] "RemoveContainer" containerID="7bbfa485bb990a2b4c13dcfc288cd81aa9bf1af4ba88f5e48d9462350d9b3326" Oct 07 08:23:18 crc kubenswrapper[4875]: I1007 08:23:18.941414 4875 scope.go:117] "RemoveContainer" containerID="a6b71bbffe51529dbd34c3083681a4cfd4ce8ca812d4b3936851251d83171151" Oct 07 08:23:19 crc kubenswrapper[4875]: I1007 08:23:19.038967 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-jngqj"] Oct 07 08:23:19 crc kubenswrapper[4875]: I1007 08:23:19.046689 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-jngqj"] Oct 07 08:23:19 crc kubenswrapper[4875]: I1007 08:23:19.715107 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5c73783-793b-408c-bef0-105a39960aa2" path="/var/lib/kubelet/pods/c5c73783-793b-408c-bef0-105a39960aa2/volumes" Oct 07 08:23:20 crc kubenswrapper[4875]: I1007 08:23:20.043254 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-fcfd-account-create-l824k"] Oct 07 08:23:20 crc kubenswrapper[4875]: I1007 08:23:20.060793 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-fcfd-account-create-l824k"] Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.033380 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-f53c-account-create-gm78b"] Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.042021 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-f53c-account-create-gm78b"] Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.176561 4875 generic.go:334] "Generic (PLEG): container finished" podID="7b638466-3fdb-4290-8a73-9f3d018a8ee0" containerID="104194db3586cde99184876ce012a4e4afe4367c905f6c3188eb02f7170b7a66" exitCode=0 Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.176606 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" event={"ID":"7b638466-3fdb-4290-8a73-9f3d018a8ee0","Type":"ContainerDied","Data":"104194db3586cde99184876ce012a4e4afe4367c905f6c3188eb02f7170b7a66"} Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.697599 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:23:21 crc kubenswrapper[4875]: E1007 08:23:21.698173 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.712208 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be2a7797-874d-438b-a3a6-8b19d97011df" path="/var/lib/kubelet/pods/be2a7797-874d-438b-a3a6-8b19d97011df/volumes" Oct 07 08:23:21 crc kubenswrapper[4875]: I1007 08:23:21.713972 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d" path="/var/lib/kubelet/pods/f9d30e8d-f3fb-4aec-8b1a-0546a89bda6d/volumes" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.025957 4875 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/neutron-b4f5-account-create-npspj"] Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.034178 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b4f5-account-create-npspj"] Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.585054 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.748174 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory\") pod \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.748226 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g958r\" (UniqueName: \"kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r\") pod \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.748314 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key\") pod \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\" (UID: \"7b638466-3fdb-4290-8a73-9f3d018a8ee0\") " Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.758714 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r" (OuterVolumeSpecName: "kube-api-access-g958r") pod "7b638466-3fdb-4290-8a73-9f3d018a8ee0" (UID: "7b638466-3fdb-4290-8a73-9f3d018a8ee0"). InnerVolumeSpecName "kube-api-access-g958r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.780385 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory" (OuterVolumeSpecName: "inventory") pod "7b638466-3fdb-4290-8a73-9f3d018a8ee0" (UID: "7b638466-3fdb-4290-8a73-9f3d018a8ee0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.782563 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7b638466-3fdb-4290-8a73-9f3d018a8ee0" (UID: "7b638466-3fdb-4290-8a73-9f3d018a8ee0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.850527 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.850564 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b638466-3fdb-4290-8a73-9f3d018a8ee0-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:23:22 crc kubenswrapper[4875]: I1007 08:23:22.850574 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g958r\" (UniqueName: \"kubernetes.io/projected/7b638466-3fdb-4290-8a73-9f3d018a8ee0-kube-api-access-g958r\") on node \"crc\" DevicePath \"\"" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.202069 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" event={"ID":"7b638466-3fdb-4290-8a73-9f3d018a8ee0","Type":"ContainerDied","Data":"bd72f192477bf909dfd45918abf6c11b26cfc09732fde4c42ab712d432dcecda"} Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.202110 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd72f192477bf909dfd45918abf6c11b26cfc09732fde4c42ab712d432dcecda" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.202168 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-f424w" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.285988 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h"] Oct 07 08:23:23 crc kubenswrapper[4875]: E1007 08:23:23.286470 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b638466-3fdb-4290-8a73-9f3d018a8ee0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.286499 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b638466-3fdb-4290-8a73-9f3d018a8ee0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.286738 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b638466-3fdb-4290-8a73-9f3d018a8ee0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.287609 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.289440 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.289648 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.290411 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.291650 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.299393 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h"] Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.359591 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.359948 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.360148 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j9vx\" (UniqueName: \"kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.461909 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.461950 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.462021 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j9vx\" (UniqueName: \"kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.466099 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.466337 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.477539 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j9vx\" (UniqueName: \"kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.606664 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:23:23 crc kubenswrapper[4875]: I1007 08:23:23.713517 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f50e66-b42a-4cf1-ba35-47b295394cab" path="/var/lib/kubelet/pods/03f50e66-b42a-4cf1-ba35-47b295394cab/volumes" Oct 07 08:23:24 crc kubenswrapper[4875]: I1007 08:23:24.101664 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h"] Oct 07 08:23:24 crc kubenswrapper[4875]: I1007 08:23:24.106135 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:23:24 crc kubenswrapper[4875]: I1007 08:23:24.213572 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" event={"ID":"7af841b2-2a3c-4cea-a5b1-5f854609190b","Type":"ContainerStarted","Data":"c0b339d1b6636d32577eae878c5097fbd62a36fc67bee1a1fa70cde3e32e749f"} Oct 07 08:23:25 crc kubenswrapper[4875]: I1007 08:23:25.225856 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" event={"ID":"7af841b2-2a3c-4cea-a5b1-5f854609190b","Type":"ContainerStarted","Data":"e3e9057f3836caf8f71629ca553f6ea8cee7f51ba9001fabc7c99850393924b3"} Oct 07 08:23:25 crc kubenswrapper[4875]: I1007 08:23:25.255132 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" podStartSLOduration=1.632753374 podStartE2EDuration="2.255108229s" podCreationTimestamp="2025-10-07 08:23:23 +0000 UTC" firstStartedPulling="2025-10-07 08:23:24.105960826 +0000 UTC m=+1629.065731369" lastFinishedPulling="2025-10-07 08:23:24.728315641 +0000 UTC m=+1629.688086224" observedRunningTime="2025-10-07 
08:23:25.246706291 +0000 UTC m=+1630.206476844" watchObservedRunningTime="2025-10-07 08:23:25.255108229 +0000 UTC m=+1630.214878782" Oct 07 08:23:32 crc kubenswrapper[4875]: I1007 08:23:32.698425 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:23:32 crc kubenswrapper[4875]: E1007 08:23:32.699259 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:23:44 crc kubenswrapper[4875]: I1007 08:23:44.697827 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:23:44 crc kubenswrapper[4875]: E1007 08:23:44.698632 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:23:53 crc kubenswrapper[4875]: I1007 08:23:53.043046 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-s9cbr"] Oct 07 08:23:53 crc kubenswrapper[4875]: I1007 08:23:53.050752 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-s9cbr"] Oct 07 08:23:53 crc kubenswrapper[4875]: I1007 08:23:53.712813 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c511555-5539-4e0f-9693-8ecc46fc4b8a" path="/var/lib/kubelet/pods/2c511555-5539-4e0f-9693-8ecc46fc4b8a/volumes" Oct 07 08:23:56 crc kubenswrapper[4875]: I1007 08:23:56.052667 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-48kgl"] Oct 07 08:23:56 crc kubenswrapper[4875]: I1007 08:23:56.061423 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-48kgl"] Oct 07 08:23:57 crc kubenswrapper[4875]: I1007 08:23:57.718953 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c8adc3b-9b0c-4fda-b20c-59d4c455fe72" path="/var/lib/kubelet/pods/8c8adc3b-9b0c-4fda-b20c-59d4c455fe72/volumes" Oct 07 08:23:58 crc kubenswrapper[4875]: I1007 08:23:58.697998 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:23:58 crc kubenswrapper[4875]: E1007 08:23:58.698342 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:23:59 crc kubenswrapper[4875]: I1007 08:23:59.024952 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-km8j5"] Oct 07 08:23:59 crc kubenswrapper[4875]: I1007 08:23:59.032985 4875 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/barbican-db-sync-km8j5"] Oct 07 08:23:59 crc kubenswrapper[4875]: I1007 08:23:59.707729 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ede5949-4681-4699-befa-f13a645d1f4c" path="/var/lib/kubelet/pods/7ede5949-4681-4699-befa-f13a645d1f4c/volumes" Oct 07 08:24:11 crc kubenswrapper[4875]: I1007 08:24:11.046606 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-8lc4s"] Oct 07 08:24:11 crc kubenswrapper[4875]: I1007 08:24:11.062928 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-8lc4s"] Oct 07 08:24:11 crc kubenswrapper[4875]: I1007 08:24:11.698016 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:24:11 crc kubenswrapper[4875]: E1007 08:24:11.698501 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:24:11 crc kubenswrapper[4875]: I1007 08:24:11.708091 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08b9d931-b59f-4e6c-9081-c8b918d37ba8" path="/var/lib/kubelet/pods/08b9d931-b59f-4e6c-9081-c8b918d37ba8/volumes" Oct 07 08:24:14 crc kubenswrapper[4875]: I1007 08:24:14.024213 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-rrk4v"] Oct 07 08:24:14 crc kubenswrapper[4875]: I1007 08:24:14.032857 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-rrk4v"] Oct 07 08:24:15 crc kubenswrapper[4875]: I1007 08:24:15.708667 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927a5f2e-c935-47bb-b7b3-0efc834566ca" path="/var/lib/kubelet/pods/927a5f2e-c935-47bb-b7b3-0efc834566ca/volumes" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.166560 4875 scope.go:117] "RemoveContainer" containerID="a565a3e99c2bdb53d7bf5020ac4b7dc213507bba5456cf92d5e6f46ec458e791" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.214719 4875 scope.go:117] "RemoveContainer" containerID="8c2adc77b7833010ad017069954d4a16c3ed404af77b83544180902cb9d6f368" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.262413 4875 scope.go:117] "RemoveContainer" containerID="b149f9e615fdf134670eb2d542b7a2c6abe01b98816fa89f0cd6473908b8abda" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.300141 4875 scope.go:117] "RemoveContainer" containerID="ee50df3801964d8bb51d2bf86612cbd96a8d0bd531bb8175b4aa379a1ff6ab62" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.369791 4875 scope.go:117] "RemoveContainer" containerID="9cf927b891434ca2be290755f3dfd77585171ed34edd68fc390897211e4dd3b8" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.435045 4875 scope.go:117] "RemoveContainer" containerID="3c57a58bee060a2594c16b9cc01373c4132f22d841e87e265afd0d2a0eff9a1c" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.479382 4875 scope.go:117] "RemoveContainer" containerID="964a3ded33494b263ffb7e5ac1c58bcc035dc255bba52e5e8c55795a4a414ee0" Oct 07 08:24:19 crc kubenswrapper[4875]: I1007 08:24:19.517664 4875 scope.go:117] "RemoveContainer" containerID="00e1f17dc0358d3e11a2a727768e6a2e01b317e6de939a3a7da46419b96fc8ce" Oct 07 08:24:19 crc 
kubenswrapper[4875]: I1007 08:24:19.544133 4875 scope.go:117] "RemoveContainer" containerID="e5ef0bbd20b650137488c0f519861274e4e023fa69487eb52d876f77086969b5" Oct 07 08:24:26 crc kubenswrapper[4875]: I1007 08:24:26.699354 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:24:26 crc kubenswrapper[4875]: E1007 08:24:26.700688 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.442936 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.446169 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.470400 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.489755 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.489818 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.592211 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.592628 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsmlw\" (UniqueName: \"kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.592744 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.593136 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.593283 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.694395 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsmlw\" (UniqueName: \"kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.714455 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsmlw\" (UniqueName: \"kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw\") pod \"redhat-operators-5c52s\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:35 crc kubenswrapper[4875]: I1007 08:24:35.769151 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:36 crc kubenswrapper[4875]: I1007 08:24:36.280843 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:36 crc kubenswrapper[4875]: I1007 08:24:36.954905 4875 generic.go:334] "Generic (PLEG): container finished" podID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerID="d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90" exitCode=0 Oct 07 08:24:36 crc kubenswrapper[4875]: I1007 08:24:36.955193 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerDied","Data":"d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90"} Oct 07 08:24:36 crc kubenswrapper[4875]: I1007 08:24:36.955220 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerStarted","Data":"51e99fb082d6134eeffeb7624c8a7874a773fc2c6d66d069d56d7b2c0bebb948"} Oct 07 08:24:37 crc kubenswrapper[4875]: I1007 08:24:37.964337 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerStarted","Data":"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee"} Oct 07 08:24:38 crc kubenswrapper[4875]: I1007 08:24:38.975777 4875 generic.go:334] "Generic (PLEG): container finished" podID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerID="9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee" exitCode=0 Oct 07 08:24:38 crc kubenswrapper[4875]: I1007 08:24:38.975961 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerDied","Data":"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee"} Oct 07 08:24:39 
crc kubenswrapper[4875]: I1007 08:24:39.985931 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerStarted","Data":"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63"} Oct 07 08:24:40 crc kubenswrapper[4875]: I1007 08:24:40.003869 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5c52s" podStartSLOduration=2.55734814 podStartE2EDuration="5.003852695s" podCreationTimestamp="2025-10-07 08:24:35 +0000 UTC" firstStartedPulling="2025-10-07 08:24:36.956860984 +0000 UTC m=+1701.916631527" lastFinishedPulling="2025-10-07 08:24:39.403365539 +0000 UTC m=+1704.363136082" observedRunningTime="2025-10-07 08:24:40.003833624 +0000 UTC m=+1704.963604177" watchObservedRunningTime="2025-10-07 08:24:40.003852695 +0000 UTC m=+1704.963623238" Oct 07 08:24:40 crc kubenswrapper[4875]: I1007 08:24:40.697732 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:24:40 crc kubenswrapper[4875]: E1007 08:24:40.698305 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:24:40 crc kubenswrapper[4875]: I1007 08:24:40.999803 4875 generic.go:334] "Generic (PLEG): container finished" podID="7af841b2-2a3c-4cea-a5b1-5f854609190b" containerID="e3e9057f3836caf8f71629ca553f6ea8cee7f51ba9001fabc7c99850393924b3" exitCode=0 Oct 07 08:24:41 crc kubenswrapper[4875]: I1007 08:24:40.999911 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" event={"ID":"7af841b2-2a3c-4cea-a5b1-5f854609190b","Type":"ContainerDied","Data":"e3e9057f3836caf8f71629ca553f6ea8cee7f51ba9001fabc7c99850393924b3"} Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.482271 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.634687 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key\") pod \"7af841b2-2a3c-4cea-a5b1-5f854609190b\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.634750 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory\") pod \"7af841b2-2a3c-4cea-a5b1-5f854609190b\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.634830 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j9vx\" (UniqueName: \"kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx\") pod \"7af841b2-2a3c-4cea-a5b1-5f854609190b\" (UID: \"7af841b2-2a3c-4cea-a5b1-5f854609190b\") " Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.644123 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx" (OuterVolumeSpecName: "kube-api-access-7j9vx") pod "7af841b2-2a3c-4cea-a5b1-5f854609190b" (UID: "7af841b2-2a3c-4cea-a5b1-5f854609190b"). InnerVolumeSpecName "kube-api-access-7j9vx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.663033 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory" (OuterVolumeSpecName: "inventory") pod "7af841b2-2a3c-4cea-a5b1-5f854609190b" (UID: "7af841b2-2a3c-4cea-a5b1-5f854609190b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.665420 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7af841b2-2a3c-4cea-a5b1-5f854609190b" (UID: "7af841b2-2a3c-4cea-a5b1-5f854609190b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.738993 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.739033 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7af841b2-2a3c-4cea-a5b1-5f854609190b-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:42 crc kubenswrapper[4875]: I1007 08:24:42.739043 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j9vx\" (UniqueName: \"kubernetes.io/projected/7af841b2-2a3c-4cea-a5b1-5f854609190b-kube-api-access-7j9vx\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.018573 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" event={"ID":"7af841b2-2a3c-4cea-a5b1-5f854609190b","Type":"ContainerDied","Data":"c0b339d1b6636d32577eae878c5097fbd62a36fc67bee1a1fa70cde3e32e749f"} Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.018615 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.018621 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0b339d1b6636d32577eae878c5097fbd62a36fc67bee1a1fa70cde3e32e749f" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.107043 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5"] Oct 07 08:24:43 crc kubenswrapper[4875]: E1007 08:24:43.107582 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7af841b2-2a3c-4cea-a5b1-5f854609190b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.107604 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7af841b2-2a3c-4cea-a5b1-5f854609190b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.107839 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7af841b2-2a3c-4cea-a5b1-5f854609190b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.108574 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.111460 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.111616 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.112184 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.112182 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.121656 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5"] Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.249533 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.249580 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.249715 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdhxt\" (UniqueName: \"kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.351552 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdhxt\" (UniqueName: \"kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.351638 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.351668 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.362688 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.362740 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.376685 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdhxt\" (UniqueName: \"kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.426923 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:43 crc kubenswrapper[4875]: I1007 08:24:43.993839 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5"] Oct 07 08:24:44 crc kubenswrapper[4875]: I1007 08:24:44.029545 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" event={"ID":"7182b81e-0002-4025-ab76-31844db2d768","Type":"ContainerStarted","Data":"5ce3d3e0a7551bef9839fd48d7462ff15851e2663fe1b7aaec2e364ec98eccca"} Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.038027 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-xkx25"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.048199 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" event={"ID":"7182b81e-0002-4025-ab76-31844db2d768","Type":"ContainerStarted","Data":"118395b587740e838b55d2de12f1b5f897af59d3576a18c52a3e4251f4ce57f9"} Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.051856 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-spt5r"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.059993 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-x7f47"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.066839 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-x7f47"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.073709 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-spt5r"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.080357 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-db-create-xkx25"] Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.080464 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" podStartSLOduration=1.495811599 podStartE2EDuration="2.080448087s" podCreationTimestamp="2025-10-07 08:24:43 +0000 UTC" firstStartedPulling="2025-10-07 08:24:43.997533793 +0000 UTC m=+1708.957304336" lastFinishedPulling="2025-10-07 08:24:44.582170271 +0000 UTC m=+1709.541940824" observedRunningTime="2025-10-07 08:24:45.061798931 +0000 UTC m=+1710.021569484" watchObservedRunningTime="2025-10-07 08:24:45.080448087 +0000 UTC m=+1710.040218630" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.708753 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39a4c6c6-dcf7-4d49-9a79-44258c5db723" path="/var/lib/kubelet/pods/39a4c6c6-dcf7-4d49-9a79-44258c5db723/volumes" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.710071 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e168f8b-b075-472f-87c8-84a101d9ffda" path="/var/lib/kubelet/pods/5e168f8b-b075-472f-87c8-84a101d9ffda/volumes" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.711245 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ceb89a19-ace2-4804-8d6d-236266c6d7d9" path="/var/lib/kubelet/pods/ceb89a19-ace2-4804-8d6d-236266c6d7d9/volumes" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.770079 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.770161 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:45 crc kubenswrapper[4875]: I1007 08:24:45.831192 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:46 crc kubenswrapper[4875]: I1007 08:24:46.102915 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:46 crc kubenswrapper[4875]: I1007 08:24:46.152744 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.077580 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5c52s" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="registry-server" containerID="cri-o://ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63" gracePeriod=2 Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.524630 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.651955 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content\") pod \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.652014 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsmlw\" (UniqueName: \"kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw\") pod \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.652083 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities\") pod \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\" (UID: \"ea5f54bb-b3bb-429e-a5d2-05d983027f85\") " Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.653168 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities" (OuterVolumeSpecName: "utilities") pod "ea5f54bb-b3bb-429e-a5d2-05d983027f85" (UID: "ea5f54bb-b3bb-429e-a5d2-05d983027f85"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.657047 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw" (OuterVolumeSpecName: "kube-api-access-nsmlw") pod "ea5f54bb-b3bb-429e-a5d2-05d983027f85" (UID: "ea5f54bb-b3bb-429e-a5d2-05d983027f85"). InnerVolumeSpecName "kube-api-access-nsmlw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.754995 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsmlw\" (UniqueName: \"kubernetes.io/projected/ea5f54bb-b3bb-429e-a5d2-05d983027f85-kube-api-access-nsmlw\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:48 crc kubenswrapper[4875]: I1007 08:24:48.755038 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.087407 4875 generic.go:334] "Generic (PLEG): container finished" podID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerID="ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63" exitCode=0 Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.087441 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerDied","Data":"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63"} Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.087465 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5c52s" event={"ID":"ea5f54bb-b3bb-429e-a5d2-05d983027f85","Type":"ContainerDied","Data":"51e99fb082d6134eeffeb7624c8a7874a773fc2c6d66d069d56d7b2c0bebb948"} Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.087487 4875 scope.go:117] "RemoveContainer" containerID="ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.088156 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5c52s" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.111115 4875 scope.go:117] "RemoveContainer" containerID="9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.131590 4875 scope.go:117] "RemoveContainer" containerID="d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.185849 4875 scope.go:117] "RemoveContainer" containerID="ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63" Oct 07 08:24:49 crc kubenswrapper[4875]: E1007 08:24:49.186249 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63\": container with ID starting with ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63 not found: ID does not exist" containerID="ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.186296 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63"} err="failed to get container status \"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63\": rpc error: code = NotFound desc = could not find container \"ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63\": container with ID starting with ab1ef24cd39a42ca0817874bc955d4be77f7b27564bbc1042f771245af77ec63 not found: ID does not exist" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.186330 4875 scope.go:117] "RemoveContainer" containerID="9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee" Oct 07 08:24:49 crc kubenswrapper[4875]: E1007 08:24:49.186617 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee\": container with ID starting with 9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee not found: ID does not exist" containerID="9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.186649 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee"} err="failed to get container status \"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee\": rpc error: code = NotFound desc = could not find container \"9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee\": container with ID starting with 9dcccdeac5eff5cc15ed189b74504c98dc8a83a99fd99db703bfdb761aa6e8ee not found: ID does not exist" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.186673 4875 scope.go:117] "RemoveContainer" containerID="d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90" Oct 07 08:24:49 crc kubenswrapper[4875]: E1007 08:24:49.186928 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90\": container with ID starting with d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90 not found: ID does not exist" containerID="d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90" 
Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.186957 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90"} err="failed to get container status \"d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90\": rpc error: code = NotFound desc = could not find container \"d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90\": container with ID starting with d3b7ed90fe3404cfc11ce1b4da15db346ffd3bf319f5bc6fc796a36a7bd9be90 not found: ID does not exist" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.471306 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea5f54bb-b3bb-429e-a5d2-05d983027f85" (UID: "ea5f54bb-b3bb-429e-a5d2-05d983027f85"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.569370 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5f54bb-b3bb-429e-a5d2-05d983027f85-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.795827 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:49 crc kubenswrapper[4875]: I1007 08:24:49.802462 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5c52s"] Oct 07 08:24:50 crc kubenswrapper[4875]: I1007 08:24:50.096068 4875 generic.go:334] "Generic (PLEG): container finished" podID="7182b81e-0002-4025-ab76-31844db2d768" containerID="118395b587740e838b55d2de12f1b5f897af59d3576a18c52a3e4251f4ce57f9" exitCode=0 Oct 07 08:24:50 crc kubenswrapper[4875]: I1007 08:24:50.096104 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" event={"ID":"7182b81e-0002-4025-ab76-31844db2d768","Type":"ContainerDied","Data":"118395b587740e838b55d2de12f1b5f897af59d3576a18c52a3e4251f4ce57f9"} Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.563134 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.717508 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdhxt\" (UniqueName: \"kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt\") pod \"7182b81e-0002-4025-ab76-31844db2d768\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.717787 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory\") pod \"7182b81e-0002-4025-ab76-31844db2d768\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.718375 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key\") pod \"7182b81e-0002-4025-ab76-31844db2d768\" (UID: \"7182b81e-0002-4025-ab76-31844db2d768\") " Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.723538 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt" (OuterVolumeSpecName: "kube-api-access-bdhxt") pod "7182b81e-0002-4025-ab76-31844db2d768" (UID: "7182b81e-0002-4025-ab76-31844db2d768"). InnerVolumeSpecName "kube-api-access-bdhxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.724266 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" path="/var/lib/kubelet/pods/ea5f54bb-b3bb-429e-a5d2-05d983027f85/volumes" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.743442 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7182b81e-0002-4025-ab76-31844db2d768" (UID: "7182b81e-0002-4025-ab76-31844db2d768"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.754513 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory" (OuterVolumeSpecName: "inventory") pod "7182b81e-0002-4025-ab76-31844db2d768" (UID: "7182b81e-0002-4025-ab76-31844db2d768"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.822568 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdhxt\" (UniqueName: \"kubernetes.io/projected/7182b81e-0002-4025-ab76-31844db2d768-kube-api-access-bdhxt\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.822604 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:51 crc kubenswrapper[4875]: I1007 08:24:51.822617 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7182b81e-0002-4025-ab76-31844db2d768-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.026769 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b31b-account-create-pgmkn"] Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.037896 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-892a-account-create-5s8cf"] Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.045125 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-892a-account-create-5s8cf"] Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.052474 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b31b-account-create-pgmkn"] Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.118294 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" event={"ID":"7182b81e-0002-4025-ab76-31844db2d768","Type":"ContainerDied","Data":"5ce3d3e0a7551bef9839fd48d7462ff15851e2663fe1b7aaec2e364ec98eccca"} Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.118339 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce3d3e0a7551bef9839fd48d7462ff15851e2663fe1b7aaec2e364ec98eccca" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.118380 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.223269 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d"] Oct 07 08:24:52 crc kubenswrapper[4875]: E1007 08:24:52.223979 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="registry-server" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.223996 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="registry-server" Oct 07 08:24:52 crc kubenswrapper[4875]: E1007 08:24:52.224035 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7182b81e-0002-4025-ab76-31844db2d768" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.224046 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7182b81e-0002-4025-ab76-31844db2d768" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:52 crc kubenswrapper[4875]: E1007 08:24:52.224099 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="extract-utilities" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.224109 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="extract-utilities" Oct 07 08:24:52 crc kubenswrapper[4875]: E1007 08:24:52.224138 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="extract-content" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.224147 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="extract-content" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.224456 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7182b81e-0002-4025-ab76-31844db2d768" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.224541 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5f54bb-b3bb-429e-a5d2-05d983027f85" containerName="registry-server" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.226568 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d"] Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.226792 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.235346 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.235781 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.236425 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.237117 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.336956 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.337053 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.337106 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pstp\" (UniqueName: \"kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.438805 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pstp\" (UniqueName: \"kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.439000 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.439256 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.443920 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.445582 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.458813 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pstp\" (UniqueName: \"kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jwr5d\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:52 crc kubenswrapper[4875]: I1007 08:24:52.561621 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:24:53 crc kubenswrapper[4875]: W1007 08:24:53.100568 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod489def06_7200_4a3d_9d81_a811bac28712.slice/crio-b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674 WatchSource:0}: Error finding container b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674: Status 404 returned error can't find the container with id b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674 Oct 07 08:24:53 crc kubenswrapper[4875]: I1007 08:24:53.100788 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d"] Oct 07 08:24:53 crc kubenswrapper[4875]: I1007 08:24:53.132320 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" event={"ID":"489def06-7200-4a3d-9d81-a811bac28712","Type":"ContainerStarted","Data":"b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674"} Oct 07 08:24:53 crc kubenswrapper[4875]: I1007 08:24:53.710424 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733d2913-becf-4e28-b1a8-c8c510bd1fba" path="/var/lib/kubelet/pods/733d2913-becf-4e28-b1a8-c8c510bd1fba/volumes" Oct 07 08:24:53 crc kubenswrapper[4875]: I1007 08:24:53.711006 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95bfab8-a736-486e-ab63-99652c643651" path="/var/lib/kubelet/pods/c95bfab8-a736-486e-ab63-99652c643651/volumes" Oct 07 08:24:54 crc kubenswrapper[4875]: I1007 08:24:54.143330 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" event={"ID":"489def06-7200-4a3d-9d81-a811bac28712","Type":"ContainerStarted","Data":"919f684794a2c11ad9d29a0eb09164f33b6bc516f894fa4735d3dff54c986c89"} Oct 07 08:24:55 crc kubenswrapper[4875]: I1007 08:24:55.716772 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:24:55 crc kubenswrapper[4875]: E1007 
08:24:55.722426 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:25:07 crc kubenswrapper[4875]: I1007 08:25:07.039449 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" podStartSLOduration=14.57966034 podStartE2EDuration="15.039432024s" podCreationTimestamp="2025-10-07 08:24:52 +0000 UTC" firstStartedPulling="2025-10-07 08:24:53.103163105 +0000 UTC m=+1718.062933648" lastFinishedPulling="2025-10-07 08:24:53.562934789 +0000 UTC m=+1718.522705332" observedRunningTime="2025-10-07 08:24:54.163954352 +0000 UTC m=+1719.123724915" watchObservedRunningTime="2025-10-07 08:25:07.039432024 +0000 UTC m=+1731.999202567" Oct 07 08:25:07 crc kubenswrapper[4875]: I1007 08:25:07.046422 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1ccd-account-create-8gkv6"] Oct 07 08:25:07 crc kubenswrapper[4875]: I1007 08:25:07.053245 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1ccd-account-create-8gkv6"] Oct 07 08:25:07 crc kubenswrapper[4875]: I1007 08:25:07.708577 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4861a958-1085-4ebf-a62a-8566468cbdad" path="/var/lib/kubelet/pods/4861a958-1085-4ebf-a62a-8566468cbdad/volumes" Oct 07 08:25:10 crc kubenswrapper[4875]: I1007 08:25:10.698379 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:25:10 crc kubenswrapper[4875]: E1007 08:25:10.699180 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.039077 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8bg6r"] Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.048595 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8bg6r"] Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.697252 4875 scope.go:117] "RemoveContainer" containerID="27e74e6769fb1785312527ebfca9300df57c740afc9ed08b65ad7431632db5ed" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.726843 4875 scope.go:117] "RemoveContainer" containerID="b32bfeee11314ba06dea3eb1c48ed2b56b1524d234d3fdba24eb9be7de4ea1a9" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.728647 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f6edd8b-6c78-4ab9-9cbc-49b66303467d" path="/var/lib/kubelet/pods/3f6edd8b-6c78-4ab9-9cbc-49b66303467d/volumes" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.791932 4875 scope.go:117] "RemoveContainer" containerID="4368636d8afeaf9948fee4dad41b7134d140462d62c2e5b6cf6300355e078e1e" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.828676 
4875 scope.go:117] "RemoveContainer" containerID="7e860d2be46efac7c7d8ff938355c6c487a90214ba868fa27403da0efd4b8d39" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.880809 4875 scope.go:117] "RemoveContainer" containerID="823297c8a6a8bac1329d0a5274c85040fdbba71e878f34ec06d16c67bfb83770" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.949753 4875 scope.go:117] "RemoveContainer" containerID="1e570e6a35fda4ee948de23ad26f6b10fc9e46be47a6cf82678622f6abcc709d" Oct 07 08:25:19 crc kubenswrapper[4875]: I1007 08:25:19.994704 4875 scope.go:117] "RemoveContainer" containerID="251b6fba44dd7d45dde7ca92d7e21074816028b46c47c0cd064b7c7e3a921777" Oct 07 08:25:23 crc kubenswrapper[4875]: I1007 08:25:23.698657 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:25:23 crc kubenswrapper[4875]: E1007 08:25:23.699533 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:25:32 crc kubenswrapper[4875]: I1007 08:25:32.513404 4875 generic.go:334] "Generic (PLEG): container finished" podID="489def06-7200-4a3d-9d81-a811bac28712" containerID="919f684794a2c11ad9d29a0eb09164f33b6bc516f894fa4735d3dff54c986c89" exitCode=0 Oct 07 08:25:32 crc kubenswrapper[4875]: I1007 08:25:32.513489 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" event={"ID":"489def06-7200-4a3d-9d81-a811bac28712","Type":"ContainerDied","Data":"919f684794a2c11ad9d29a0eb09164f33b6bc516f894fa4735d3dff54c986c89"} Oct 07 08:25:33 crc kubenswrapper[4875]: I1007 08:25:33.886635 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.058515 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pstp\" (UniqueName: \"kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp\") pod \"489def06-7200-4a3d-9d81-a811bac28712\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.058582 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key\") pod \"489def06-7200-4a3d-9d81-a811bac28712\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.058669 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory\") pod \"489def06-7200-4a3d-9d81-a811bac28712\" (UID: \"489def06-7200-4a3d-9d81-a811bac28712\") " Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.064268 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp" (OuterVolumeSpecName: "kube-api-access-9pstp") pod "489def06-7200-4a3d-9d81-a811bac28712" (UID: "489def06-7200-4a3d-9d81-a811bac28712"). 
InnerVolumeSpecName "kube-api-access-9pstp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.091399 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory" (OuterVolumeSpecName: "inventory") pod "489def06-7200-4a3d-9d81-a811bac28712" (UID: "489def06-7200-4a3d-9d81-a811bac28712"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.092290 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "489def06-7200-4a3d-9d81-a811bac28712" (UID: "489def06-7200-4a3d-9d81-a811bac28712"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.161738 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.161775 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pstp\" (UniqueName: \"kubernetes.io/projected/489def06-7200-4a3d-9d81-a811bac28712-kube-api-access-9pstp\") on node \"crc\" DevicePath \"\"" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.161786 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/489def06-7200-4a3d-9d81-a811bac28712-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.530698 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" event={"ID":"489def06-7200-4a3d-9d81-a811bac28712","Type":"ContainerDied","Data":"b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674"} Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.530744 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b50ec52b623e771e76dc8457aa7bbcceb5dea2b810e7c711cf65dc66a873d674" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.530788 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jwr5d" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.610686 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx"] Oct 07 08:25:34 crc kubenswrapper[4875]: E1007 08:25:34.611236 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="489def06-7200-4a3d-9d81-a811bac28712" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.611261 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="489def06-7200-4a3d-9d81-a811bac28712" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.611494 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="489def06-7200-4a3d-9d81-a811bac28712" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.612226 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.618289 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.618528 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.618424 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.618776 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.642008 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx"] Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.672310 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.672380 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.672459 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnchw\" (UniqueName: \"kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.774124 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.774199 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.774276 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnchw\" (UniqueName: \"kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" 
(UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.780373 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.789902 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.791966 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnchw\" (UniqueName: \"kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:34 crc kubenswrapper[4875]: I1007 08:25:34.936713 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:25:35 crc kubenswrapper[4875]: I1007 08:25:35.458991 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx"] Oct 07 08:25:35 crc kubenswrapper[4875]: I1007 08:25:35.539049 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" event={"ID":"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a","Type":"ContainerStarted","Data":"ac00db5875f164c27386d3d8c1ac9acd8b5c1a4d40be99637403ed1ec3f58052"} Oct 07 08:25:36 crc kubenswrapper[4875]: I1007 08:25:36.548788 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" event={"ID":"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a","Type":"ContainerStarted","Data":"3ed73252131242326ba88681033c16991781e8335002ab7e6ac22a7592bab7fb"} Oct 07 08:25:36 crc kubenswrapper[4875]: I1007 08:25:36.568578 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" podStartSLOduration=2.185906895 podStartE2EDuration="2.568548344s" podCreationTimestamp="2025-10-07 08:25:34 +0000 UTC" firstStartedPulling="2025-10-07 08:25:35.458350496 +0000 UTC m=+1760.418121039" lastFinishedPulling="2025-10-07 08:25:35.840991945 +0000 UTC m=+1760.800762488" observedRunningTime="2025-10-07 08:25:36.564865196 +0000 UTC m=+1761.524635759" watchObservedRunningTime="2025-10-07 08:25:36.568548344 +0000 UTC m=+1761.528318927" Oct 07 08:25:37 crc kubenswrapper[4875]: I1007 08:25:37.699665 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:25:37 crc kubenswrapper[4875]: E1007 08:25:37.700481 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:25:42 crc kubenswrapper[4875]: I1007 08:25:42.081383 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-vdpxw"] Oct 07 08:25:42 crc kubenswrapper[4875]: I1007 08:25:42.100394 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-vdpxw"] Oct 07 08:25:43 crc kubenswrapper[4875]: I1007 08:25:43.040635 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cdldk"] Oct 07 08:25:43 crc kubenswrapper[4875]: I1007 08:25:43.052187 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cdldk"] Oct 07 08:25:43 crc kubenswrapper[4875]: I1007 08:25:43.718003 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31deb1ca-dafe-4134-bf0c-d8357153d8ed" path="/var/lib/kubelet/pods/31deb1ca-dafe-4134-bf0c-d8357153d8ed/volumes" Oct 07 08:25:43 crc kubenswrapper[4875]: I1007 08:25:43.719724 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf652b8b-3c7e-4673-a2f4-4af434e1ce0b" path="/var/lib/kubelet/pods/bf652b8b-3c7e-4673-a2f4-4af434e1ce0b/volumes" Oct 07 08:25:49 crc kubenswrapper[4875]: I1007 08:25:49.697591 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:25:49 crc kubenswrapper[4875]: E1007 08:25:49.699533 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:26:00 crc kubenswrapper[4875]: I1007 08:26:00.697755 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:26:00 crc kubenswrapper[4875]: E1007 08:26:00.698557 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:26:13 crc kubenswrapper[4875]: I1007 08:26:13.697695 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:26:13 crc kubenswrapper[4875]: E1007 08:26:13.698389 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:26:20 crc kubenswrapper[4875]: I1007 08:26:20.201790 4875 scope.go:117] "RemoveContainer" 
containerID="8a38f71ca8f0da6b4b2b8e740f3d07ee88fe560e47d3a66f9bb4a9eb24543eb7" Oct 07 08:26:20 crc kubenswrapper[4875]: I1007 08:26:20.244792 4875 scope.go:117] "RemoveContainer" containerID="83d348756739527f131afb9ecef42e53c72b8631d7429e118e7f7ce6fb7335cd" Oct 07 08:26:28 crc kubenswrapper[4875]: I1007 08:26:28.039110 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-sg8rr"] Oct 07 08:26:28 crc kubenswrapper[4875]: I1007 08:26:28.045564 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-sg8rr"] Oct 07 08:26:28 crc kubenswrapper[4875]: I1007 08:26:28.698361 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:26:28 crc kubenswrapper[4875]: E1007 08:26:28.698671 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:26:29 crc kubenswrapper[4875]: I1007 08:26:29.711458 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08172c25-7af0-4bf8-a8e3-aae5b6403176" path="/var/lib/kubelet/pods/08172c25-7af0-4bf8-a8e3-aae5b6403176/volumes" Oct 07 08:26:36 crc kubenswrapper[4875]: I1007 08:26:36.083594 4875 generic.go:334] "Generic (PLEG): container finished" podID="063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" containerID="3ed73252131242326ba88681033c16991781e8335002ab7e6ac22a7592bab7fb" exitCode=2 Oct 07 08:26:36 crc kubenswrapper[4875]: I1007 08:26:36.083655 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" event={"ID":"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a","Type":"ContainerDied","Data":"3ed73252131242326ba88681033c16991781e8335002ab7e6ac22a7592bab7fb"} Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.533377 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.727656 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnchw\" (UniqueName: \"kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw\") pod \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.728062 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory\") pod \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.728258 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key\") pod \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\" (UID: \"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a\") " Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.735924 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw" (OuterVolumeSpecName: "kube-api-access-cnchw") pod "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" (UID: "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a"). InnerVolumeSpecName "kube-api-access-cnchw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.764298 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" (UID: "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.764734 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory" (OuterVolumeSpecName: "inventory") pod "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" (UID: "063ffc6e-9bc6-41ea-9d6d-d73e3923c92a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.831368 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnchw\" (UniqueName: \"kubernetes.io/projected/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-kube-api-access-cnchw\") on node \"crc\" DevicePath \"\"" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.831405 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:26:37 crc kubenswrapper[4875]: I1007 08:26:37.831416 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063ffc6e-9bc6-41ea-9d6d-d73e3923c92a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:26:38 crc kubenswrapper[4875]: I1007 08:26:38.109433 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" event={"ID":"063ffc6e-9bc6-41ea-9d6d-d73e3923c92a","Type":"ContainerDied","Data":"ac00db5875f164c27386d3d8c1ac9acd8b5c1a4d40be99637403ed1ec3f58052"} Oct 07 08:26:38 crc kubenswrapper[4875]: I1007 08:26:38.109728 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac00db5875f164c27386d3d8c1ac9acd8b5c1a4d40be99637403ed1ec3f58052" Oct 07 08:26:38 crc kubenswrapper[4875]: I1007 08:26:38.109501 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx" Oct 07 08:26:42 crc kubenswrapper[4875]: I1007 08:26:42.698811 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:26:43 crc kubenswrapper[4875]: I1007 08:26:43.157498 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a"} Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.036348 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq"] Oct 07 08:26:45 crc kubenswrapper[4875]: E1007 08:26:45.037653 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.037671 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.038121 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="063ffc6e-9bc6-41ea-9d6d-d73e3923c92a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.038767 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.047133 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.047188 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.047406 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.049807 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.065256 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq"] Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.180514 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.181572 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.181716 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5nw6\" (UniqueName: \"kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.284015 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.284060 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5nw6\" (UniqueName: \"kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.284119 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" 
(UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.290065 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.290825 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.307300 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5nw6\" (UniqueName: \"kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.365145 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:26:45 crc kubenswrapper[4875]: I1007 08:26:45.854735 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq"] Oct 07 08:26:45 crc kubenswrapper[4875]: W1007 08:26:45.861120 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod965bc704_2251_46a2_b947_05d835da9ea9.slice/crio-8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb WatchSource:0}: Error finding container 8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb: Status 404 returned error can't find the container with id 8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb Oct 07 08:26:46 crc kubenswrapper[4875]: I1007 08:26:46.182944 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" event={"ID":"965bc704-2251-46a2-b947-05d835da9ea9","Type":"ContainerStarted","Data":"8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb"} Oct 07 08:26:47 crc kubenswrapper[4875]: I1007 08:26:47.200116 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" event={"ID":"965bc704-2251-46a2-b947-05d835da9ea9","Type":"ContainerStarted","Data":"d7d226ad2e2275219dad9cedca7ce69a04f1df9a6806b0e550f8014cc4ddd31d"} Oct 07 08:26:47 crc kubenswrapper[4875]: I1007 08:26:47.227914 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" podStartSLOduration=1.583488225 podStartE2EDuration="2.227898841s" podCreationTimestamp="2025-10-07 08:26:45 +0000 UTC" firstStartedPulling="2025-10-07 08:26:45.863740165 +0000 UTC m=+1830.823510708" lastFinishedPulling="2025-10-07 08:26:46.508150781 +0000 UTC m=+1831.467921324" observedRunningTime="2025-10-07 
08:26:47.226090004 +0000 UTC m=+1832.185860547" watchObservedRunningTime="2025-10-07 08:26:47.227898841 +0000 UTC m=+1832.187669384" Oct 07 08:27:20 crc kubenswrapper[4875]: I1007 08:27:20.332984 4875 scope.go:117] "RemoveContainer" containerID="1486e2e795d66127280fce6db4987b955eaf0a37fc3a1ea6a0a0e4daffcc3040" Oct 07 08:27:37 crc kubenswrapper[4875]: I1007 08:27:37.657621 4875 generic.go:334] "Generic (PLEG): container finished" podID="965bc704-2251-46a2-b947-05d835da9ea9" containerID="d7d226ad2e2275219dad9cedca7ce69a04f1df9a6806b0e550f8014cc4ddd31d" exitCode=0 Oct 07 08:27:37 crc kubenswrapper[4875]: I1007 08:27:37.657680 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" event={"ID":"965bc704-2251-46a2-b947-05d835da9ea9","Type":"ContainerDied","Data":"d7d226ad2e2275219dad9cedca7ce69a04f1df9a6806b0e550f8014cc4ddd31d"} Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.100896 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.146552 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory\") pod \"965bc704-2251-46a2-b947-05d835da9ea9\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.146674 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key\") pod \"965bc704-2251-46a2-b947-05d835da9ea9\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.146818 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5nw6\" (UniqueName: \"kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6\") pod \"965bc704-2251-46a2-b947-05d835da9ea9\" (UID: \"965bc704-2251-46a2-b947-05d835da9ea9\") " Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.153197 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6" (OuterVolumeSpecName: "kube-api-access-s5nw6") pod "965bc704-2251-46a2-b947-05d835da9ea9" (UID: "965bc704-2251-46a2-b947-05d835da9ea9"). InnerVolumeSpecName "kube-api-access-s5nw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.176498 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory" (OuterVolumeSpecName: "inventory") pod "965bc704-2251-46a2-b947-05d835da9ea9" (UID: "965bc704-2251-46a2-b947-05d835da9ea9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.177055 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "965bc704-2251-46a2-b947-05d835da9ea9" (UID: "965bc704-2251-46a2-b947-05d835da9ea9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.249188 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.249234 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5nw6\" (UniqueName: \"kubernetes.io/projected/965bc704-2251-46a2-b947-05d835da9ea9-kube-api-access-s5nw6\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.249251 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965bc704-2251-46a2-b947-05d835da9ea9-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.675218 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" event={"ID":"965bc704-2251-46a2-b947-05d835da9ea9","Type":"ContainerDied","Data":"8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb"} Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.675467 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cb438df3198aef6dd85a5a72553fe986875de36b945fd0fd8ca727938d1cbdb" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.675268 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.762190 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-prbmz"] Oct 07 08:27:39 crc kubenswrapper[4875]: E1007 08:27:39.762701 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="965bc704-2251-46a2-b947-05d835da9ea9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.762726 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="965bc704-2251-46a2-b947-05d835da9ea9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.762972 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="965bc704-2251-46a2-b947-05d835da9ea9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.763761 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.765442 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.765598 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.765820 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.765901 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.780788 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-prbmz"] Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.859811 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.859867 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kxsq\" (UniqueName: \"kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.860266 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.962079 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.962173 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.962210 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kxsq\" (UniqueName: \"kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc 
kubenswrapper[4875]: I1007 08:27:39.967668 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.974989 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:39 crc kubenswrapper[4875]: I1007 08:27:39.980360 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kxsq\" (UniqueName: \"kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq\") pod \"ssh-known-hosts-edpm-deployment-prbmz\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:40 crc kubenswrapper[4875]: I1007 08:27:40.081660 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:40 crc kubenswrapper[4875]: I1007 08:27:40.697346 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-prbmz"] Oct 07 08:27:41 crc kubenswrapper[4875]: I1007 08:27:41.723806 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" event={"ID":"dfa58f34-fade-4504-8329-a9b13eb13726","Type":"ContainerStarted","Data":"b29b0013981dd45718e0d65cfc60be40ce86173ef48e7e762c9837660b732937"} Oct 07 08:27:41 crc kubenswrapper[4875]: I1007 08:27:41.724279 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" event={"ID":"dfa58f34-fade-4504-8329-a9b13eb13726","Type":"ContainerStarted","Data":"45337549b06017ff07aa2bddf0a96acc87e5f9814172db401ef0a9a0202738e2"} Oct 07 08:27:41 crc kubenswrapper[4875]: I1007 08:27:41.728892 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" podStartSLOduration=2.197475599 podStartE2EDuration="2.728857228s" podCreationTimestamp="2025-10-07 08:27:39 +0000 UTC" firstStartedPulling="2025-10-07 08:27:40.700930462 +0000 UTC m=+1885.660701005" lastFinishedPulling="2025-10-07 08:27:41.232312091 +0000 UTC m=+1886.192082634" observedRunningTime="2025-10-07 08:27:41.72548347 +0000 UTC m=+1886.685254013" watchObservedRunningTime="2025-10-07 08:27:41.728857228 +0000 UTC m=+1886.688627791" Oct 07 08:27:49 crc kubenswrapper[4875]: I1007 08:27:49.782906 4875 generic.go:334] "Generic (PLEG): container finished" podID="dfa58f34-fade-4504-8329-a9b13eb13726" containerID="b29b0013981dd45718e0d65cfc60be40ce86173ef48e7e762c9837660b732937" exitCode=0 Oct 07 08:27:49 crc kubenswrapper[4875]: I1007 08:27:49.783008 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" event={"ID":"dfa58f34-fade-4504-8329-a9b13eb13726","Type":"ContainerDied","Data":"b29b0013981dd45718e0d65cfc60be40ce86173ef48e7e762c9837660b732937"} Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.274605 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.403502 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kxsq\" (UniqueName: \"kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq\") pod \"dfa58f34-fade-4504-8329-a9b13eb13726\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.403604 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam\") pod \"dfa58f34-fade-4504-8329-a9b13eb13726\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.403716 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0\") pod \"dfa58f34-fade-4504-8329-a9b13eb13726\" (UID: \"dfa58f34-fade-4504-8329-a9b13eb13726\") " Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.412452 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq" (OuterVolumeSpecName: "kube-api-access-2kxsq") pod "dfa58f34-fade-4504-8329-a9b13eb13726" (UID: "dfa58f34-fade-4504-8329-a9b13eb13726"). InnerVolumeSpecName "kube-api-access-2kxsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.436083 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "dfa58f34-fade-4504-8329-a9b13eb13726" (UID: "dfa58f34-fade-4504-8329-a9b13eb13726"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.463933 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "dfa58f34-fade-4504-8329-a9b13eb13726" (UID: "dfa58f34-fade-4504-8329-a9b13eb13726"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.507450 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kxsq\" (UniqueName: \"kubernetes.io/projected/dfa58f34-fade-4504-8329-a9b13eb13726-kube-api-access-2kxsq\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.507534 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.507549 4875 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/dfa58f34-fade-4504-8329-a9b13eb13726-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.799505 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" event={"ID":"dfa58f34-fade-4504-8329-a9b13eb13726","Type":"ContainerDied","Data":"45337549b06017ff07aa2bddf0a96acc87e5f9814172db401ef0a9a0202738e2"} Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.799825 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45337549b06017ff07aa2bddf0a96acc87e5f9814172db401ef0a9a0202738e2" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.799911 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-prbmz" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.865768 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl"] Oct 07 08:27:51 crc kubenswrapper[4875]: E1007 08:27:51.866266 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa58f34-fade-4504-8329-a9b13eb13726" containerName="ssh-known-hosts-edpm-deployment" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.866283 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa58f34-fade-4504-8329-a9b13eb13726" containerName="ssh-known-hosts-edpm-deployment" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.866485 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa58f34-fade-4504-8329-a9b13eb13726" containerName="ssh-known-hosts-edpm-deployment" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.867149 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.869814 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.870079 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.870179 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.871044 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.890745 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl"] Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.914278 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.914475 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:51 crc kubenswrapper[4875]: I1007 08:27:51.914534 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j78jp\" (UniqueName: \"kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.016536 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.016616 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j78jp\" (UniqueName: \"kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.016752 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.020943 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.021118 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.038336 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j78jp\" (UniqueName: \"kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-zlwhl\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.189096 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.768982 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl"] Oct 07 08:27:52 crc kubenswrapper[4875]: I1007 08:27:52.810553 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" event={"ID":"3910d9af-0583-40ef-887d-e73ddf795725","Type":"ContainerStarted","Data":"812515e59a585f944f53e781d64f455f1d4060859e74e10f65b756e6a6031787"} Oct 07 08:27:53 crc kubenswrapper[4875]: I1007 08:27:53.824916 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" event={"ID":"3910d9af-0583-40ef-887d-e73ddf795725","Type":"ContainerStarted","Data":"ebf1548b2d6fe44e0684c4151210585542391479c0958972b94c8efaa41aaf8d"} Oct 07 08:28:02 crc kubenswrapper[4875]: I1007 08:28:02.918342 4875 generic.go:334] "Generic (PLEG): container finished" podID="3910d9af-0583-40ef-887d-e73ddf795725" containerID="ebf1548b2d6fe44e0684c4151210585542391479c0958972b94c8efaa41aaf8d" exitCode=0 Oct 07 08:28:02 crc kubenswrapper[4875]: I1007 08:28:02.919068 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" event={"ID":"3910d9af-0583-40ef-887d-e73ddf795725","Type":"ContainerDied","Data":"ebf1548b2d6fe44e0684c4151210585542391479c0958972b94c8efaa41aaf8d"} Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.418676 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.516605 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j78jp\" (UniqueName: \"kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp\") pod \"3910d9af-0583-40ef-887d-e73ddf795725\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.516691 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory\") pod \"3910d9af-0583-40ef-887d-e73ddf795725\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.516743 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key\") pod \"3910d9af-0583-40ef-887d-e73ddf795725\" (UID: \"3910d9af-0583-40ef-887d-e73ddf795725\") " Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.523348 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp" (OuterVolumeSpecName: "kube-api-access-j78jp") pod "3910d9af-0583-40ef-887d-e73ddf795725" (UID: "3910d9af-0583-40ef-887d-e73ddf795725"). InnerVolumeSpecName "kube-api-access-j78jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.545138 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory" (OuterVolumeSpecName: "inventory") pod "3910d9af-0583-40ef-887d-e73ddf795725" (UID: "3910d9af-0583-40ef-887d-e73ddf795725"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.547528 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3910d9af-0583-40ef-887d-e73ddf795725" (UID: "3910d9af-0583-40ef-887d-e73ddf795725"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.618543 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j78jp\" (UniqueName: \"kubernetes.io/projected/3910d9af-0583-40ef-887d-e73ddf795725-kube-api-access-j78jp\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.618580 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.618589 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3910d9af-0583-40ef-887d-e73ddf795725-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.949946 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" event={"ID":"3910d9af-0583-40ef-887d-e73ddf795725","Type":"ContainerDied","Data":"812515e59a585f944f53e781d64f455f1d4060859e74e10f65b756e6a6031787"} Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.949989 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="812515e59a585f944f53e781d64f455f1d4060859e74e10f65b756e6a6031787" Oct 07 08:28:04 crc kubenswrapper[4875]: I1007 08:28:04.950059 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-zlwhl" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.026438 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5"] Oct 07 08:28:05 crc kubenswrapper[4875]: E1007 08:28:05.026914 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3910d9af-0583-40ef-887d-e73ddf795725" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.026940 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3910d9af-0583-40ef-887d-e73ddf795725" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.027155 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3910d9af-0583-40ef-887d-e73ddf795725" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.027796 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.031868 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.032098 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.033298 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.033692 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.038804 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5"] Oct 07 08:28:05 crc kubenswrapper[4875]: E1007 08:28:05.105465 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3910d9af_0583_40ef_887d_e73ddf795725.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.127968 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.128021 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2xn2\" (UniqueName: \"kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.128054 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.230299 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.230728 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2xn2\" (UniqueName: \"kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 
07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.230766 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.236228 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.236426 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.247831 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2xn2\" (UniqueName: \"kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.378980 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.917199 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5"] Oct 07 08:28:05 crc kubenswrapper[4875]: I1007 08:28:05.961381 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" event={"ID":"f21455d3-51d3-464a-acd5-d707dfa2ee70","Type":"ContainerStarted","Data":"53ab74453680e8e6685b7080d7c45f55283298e9d3ea9f067f7c1dba5d0138ad"} Oct 07 08:28:06 crc kubenswrapper[4875]: I1007 08:28:06.976257 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" event={"ID":"f21455d3-51d3-464a-acd5-d707dfa2ee70","Type":"ContainerStarted","Data":"db073841613d4ec481657d13b2d0bd378f927f232f886b87ffa82acf54c4b667"} Oct 07 08:28:07 crc kubenswrapper[4875]: I1007 08:28:06.999970 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" podStartSLOduration=1.579356134 podStartE2EDuration="1.999949576s" podCreationTimestamp="2025-10-07 08:28:05 +0000 UTC" firstStartedPulling="2025-10-07 08:28:05.928997577 +0000 UTC m=+1910.888768120" lastFinishedPulling="2025-10-07 08:28:06.349590979 +0000 UTC m=+1911.309361562" observedRunningTime="2025-10-07 08:28:06.999685057 +0000 UTC m=+1911.959455600" watchObservedRunningTime="2025-10-07 08:28:06.999949576 +0000 UTC m=+1911.959720119" Oct 07 08:28:17 crc kubenswrapper[4875]: I1007 08:28:17.073429 4875 generic.go:334] "Generic (PLEG): container finished" podID="f21455d3-51d3-464a-acd5-d707dfa2ee70" containerID="db073841613d4ec481657d13b2d0bd378f927f232f886b87ffa82acf54c4b667" exitCode=0 Oct 07 08:28:17 crc kubenswrapper[4875]: I1007 08:28:17.073572 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" event={"ID":"f21455d3-51d3-464a-acd5-d707dfa2ee70","Type":"ContainerDied","Data":"db073841613d4ec481657d13b2d0bd378f927f232f886b87ffa82acf54c4b667"} Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.501711 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.605624 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key\") pod \"f21455d3-51d3-464a-acd5-d707dfa2ee70\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.605783 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory\") pod \"f21455d3-51d3-464a-acd5-d707dfa2ee70\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.605833 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2xn2\" (UniqueName: \"kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2\") pod \"f21455d3-51d3-464a-acd5-d707dfa2ee70\" (UID: \"f21455d3-51d3-464a-acd5-d707dfa2ee70\") " Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.614452 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2" (OuterVolumeSpecName: "kube-api-access-s2xn2") pod "f21455d3-51d3-464a-acd5-d707dfa2ee70" (UID: "f21455d3-51d3-464a-acd5-d707dfa2ee70"). InnerVolumeSpecName "kube-api-access-s2xn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.633847 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory" (OuterVolumeSpecName: "inventory") pod "f21455d3-51d3-464a-acd5-d707dfa2ee70" (UID: "f21455d3-51d3-464a-acd5-d707dfa2ee70"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.647069 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f21455d3-51d3-464a-acd5-d707dfa2ee70" (UID: "f21455d3-51d3-464a-acd5-d707dfa2ee70"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.709815 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.709906 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f21455d3-51d3-464a-acd5-d707dfa2ee70-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:18 crc kubenswrapper[4875]: I1007 08:28:18.709929 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2xn2\" (UniqueName: \"kubernetes.io/projected/f21455d3-51d3-464a-acd5-d707dfa2ee70-kube-api-access-s2xn2\") on node \"crc\" DevicePath \"\"" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.100067 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" event={"ID":"f21455d3-51d3-464a-acd5-d707dfa2ee70","Type":"ContainerDied","Data":"53ab74453680e8e6685b7080d7c45f55283298e9d3ea9f067f7c1dba5d0138ad"} Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.100114 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53ab74453680e8e6685b7080d7c45f55283298e9d3ea9f067f7c1dba5d0138ad" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.100181 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.184994 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql"] Oct 07 08:28:19 crc kubenswrapper[4875]: E1007 08:28:19.185649 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f21455d3-51d3-464a-acd5-d707dfa2ee70" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.185682 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f21455d3-51d3-464a-acd5-d707dfa2ee70" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.186064 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f21455d3-51d3-464a-acd5-d707dfa2ee70" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.187134 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.194842 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql"] Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.195848 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.195970 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.196019 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.195900 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.195919 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.196172 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.196268 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.196373 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.325541 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.325991 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lq5q\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326031 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326080 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326104 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326135 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326173 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326195 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326299 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326322 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326346 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: 
\"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326385 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326410 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.326451 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428312 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428364 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428391 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428432 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428457 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428490 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428533 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428563 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lq5q\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428597 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428645 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428668 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428696 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428716 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.428734 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.436705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.437325 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.437870 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.439097 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.439672 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 
08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.439743 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.440207 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.444289 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.444381 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.444426 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.445500 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.446211 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.448373 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lq5q\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: 
\"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.461817 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8wql\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:19 crc kubenswrapper[4875]: I1007 08:28:19.515229 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:28:20 crc kubenswrapper[4875]: I1007 08:28:20.074366 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql"] Oct 07 08:28:20 crc kubenswrapper[4875]: I1007 08:28:20.109055 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" event={"ID":"f7030085-e862-4c57-9c9e-29e88006533e","Type":"ContainerStarted","Data":"870d6f972e58b883df8a1516ac26efb745bee09a82bff144c901120b42325a1c"} Oct 07 08:28:21 crc kubenswrapper[4875]: I1007 08:28:21.124716 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" event={"ID":"f7030085-e862-4c57-9c9e-29e88006533e","Type":"ContainerStarted","Data":"c30dff2c196078bd2ff6d41d5ecd107062b2956f94e6ec2cd283b88da3a24fd8"} Oct 07 08:28:21 crc kubenswrapper[4875]: I1007 08:28:21.156457 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" podStartSLOduration=1.682521327 podStartE2EDuration="2.156437183s" podCreationTimestamp="2025-10-07 08:28:19 +0000 UTC" firstStartedPulling="2025-10-07 08:28:20.080281948 +0000 UTC m=+1925.040052491" lastFinishedPulling="2025-10-07 08:28:20.554197764 +0000 UTC m=+1925.513968347" observedRunningTime="2025-10-07 08:28:21.152126376 +0000 UTC m=+1926.111896999" watchObservedRunningTime="2025-10-07 08:28:21.156437183 +0000 UTC m=+1926.116207736" Oct 07 08:29:01 crc kubenswrapper[4875]: I1007 08:29:01.220484 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:29:01 crc kubenswrapper[4875]: I1007 08:29:01.221089 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:29:05 crc kubenswrapper[4875]: I1007 08:29:05.542936 4875 generic.go:334] "Generic (PLEG): container finished" podID="f7030085-e862-4c57-9c9e-29e88006533e" containerID="c30dff2c196078bd2ff6d41d5ecd107062b2956f94e6ec2cd283b88da3a24fd8" exitCode=0 Oct 07 08:29:05 crc kubenswrapper[4875]: I1007 08:29:05.543061 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" 
event={"ID":"f7030085-e862-4c57-9c9e-29e88006533e","Type":"ContainerDied","Data":"c30dff2c196078bd2ff6d41d5ecd107062b2956f94e6ec2cd283b88da3a24fd8"} Oct 07 08:29:06 crc kubenswrapper[4875]: I1007 08:29:06.976383 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.030165 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.038301 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.130781 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lq5q\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.130951 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.130982 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131003 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131038 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131067 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle\") pod 
\"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131105 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131124 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131188 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131248 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131263 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131292 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131328 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle\") pod \"f7030085-e862-4c57-9c9e-29e88006533e\" (UID: \"f7030085-e862-4c57-9c9e-29e88006533e\") " Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.131712 4875 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.135969 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.136790 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.136969 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.138117 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.138238 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.138425 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.139172 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q" (OuterVolumeSpecName: "kube-api-access-6lq5q") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "kube-api-access-6lq5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.139937 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.142234 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.144916 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.145619 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.161337 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory" (OuterVolumeSpecName: "inventory") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.162080 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f7030085-e862-4c57-9c9e-29e88006533e" (UID: "f7030085-e862-4c57-9c9e-29e88006533e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234620 4875 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234654 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234669 4875 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234683 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234715 4875 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234723 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234732 4875 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234740 4875 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234748 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lq5q\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-kube-api-access-6lq5q\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234756 4875 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7030085-e862-4c57-9c9e-29e88006533e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234764 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234775 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-ovn-default-certs-0\") on node 
\"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.234784 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7030085-e862-4c57-9c9e-29e88006533e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.563926 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" event={"ID":"f7030085-e862-4c57-9c9e-29e88006533e","Type":"ContainerDied","Data":"870d6f972e58b883df8a1516ac26efb745bee09a82bff144c901120b42325a1c"} Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.563985 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="870d6f972e58b883df8a1516ac26efb745bee09a82bff144c901120b42325a1c" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.564054 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8wql" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.672212 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf"] Oct 07 08:29:07 crc kubenswrapper[4875]: E1007 08:29:07.672642 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7030085-e862-4c57-9c9e-29e88006533e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.672663 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7030085-e862-4c57-9c9e-29e88006533e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.672898 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7030085-e862-4c57-9c9e-29e88006533e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.673611 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.677092 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.677106 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.677151 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.677160 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.677263 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.685095 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf"] Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.747633 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.748225 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.748451 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct24w\" (UniqueName: \"kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.748499 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.748870 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.851488 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.851577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct24w\" (UniqueName: \"kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.851604 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.851677 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.851725 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.854001 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.857476 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.857705 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.858098 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:07 crc kubenswrapper[4875]: I1007 08:29:07.873669 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct24w\" (UniqueName: \"kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrzjf\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:08 crc kubenswrapper[4875]: I1007 08:29:08.009170 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:29:08 crc kubenswrapper[4875]: I1007 08:29:08.499989 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf"] Oct 07 08:29:08 crc kubenswrapper[4875]: I1007 08:29:08.509461 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:29:08 crc kubenswrapper[4875]: I1007 08:29:08.572306 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" event={"ID":"97901af5-a883-4d4f-acd8-9425772903a9","Type":"ContainerStarted","Data":"0628658cb737f70ef9b0033656f13dd62d538ec057975be5ef38bd71dc843243"} Oct 07 08:29:09 crc kubenswrapper[4875]: I1007 08:29:09.581319 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" event={"ID":"97901af5-a883-4d4f-acd8-9425772903a9","Type":"ContainerStarted","Data":"1c87f62434522b8808d759a5bdef8e8feeab45bb9c655809365cc213c87f1000"} Oct 07 08:29:09 crc kubenswrapper[4875]: I1007 08:29:09.601678 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" podStartSLOduration=1.98766355 podStartE2EDuration="2.601661116s" podCreationTimestamp="2025-10-07 08:29:07 +0000 UTC" firstStartedPulling="2025-10-07 08:29:08.509063036 +0000 UTC m=+1973.468833589" lastFinishedPulling="2025-10-07 08:29:09.123060612 +0000 UTC m=+1974.082831155" observedRunningTime="2025-10-07 08:29:09.597117901 +0000 UTC m=+1974.556888464" watchObservedRunningTime="2025-10-07 08:29:09.601661116 +0000 UTC m=+1974.561431649" Oct 07 08:29:31 crc kubenswrapper[4875]: I1007 08:29:31.221062 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:29:31 crc kubenswrapper[4875]: I1007 08:29:31.221608 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.154322 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh"] Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.156138 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.159050 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.159703 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.166318 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh"] Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.248893 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.249391 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjczx\" (UniqueName: \"kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.249580 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.351732 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjczx\" (UniqueName: \"kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.352028 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.352191 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.353436 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume\") pod 
\"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.360940 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.385534 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjczx\" (UniqueName: \"kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx\") pod \"collect-profiles-29330430-w48kh\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.519009 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:00 crc kubenswrapper[4875]: I1007 08:30:00.967081 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh"] Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.067373 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" event={"ID":"b3acee46-ab06-4803-a585-7ddb2befb5ff","Type":"ContainerStarted","Data":"0ea3145874801823a0b66f2332842fcb4a9c3d19276f2fe46ca44cf2e95f7fa3"} Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.221494 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.223031 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.223193 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.224107 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:30:01 crc kubenswrapper[4875]: I1007 08:30:01.224312 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a" gracePeriod=600 Oct 07 08:30:02 crc 
kubenswrapper[4875]: I1007 08:30:02.075702 4875 generic.go:334] "Generic (PLEG): container finished" podID="b3acee46-ab06-4803-a585-7ddb2befb5ff" containerID="6fffc5d9403c03941c3023e6c76f248c6dc4f9bd45f2abde637eb8a034384135" exitCode=0 Oct 07 08:30:02 crc kubenswrapper[4875]: I1007 08:30:02.075838 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" event={"ID":"b3acee46-ab06-4803-a585-7ddb2befb5ff","Type":"ContainerDied","Data":"6fffc5d9403c03941c3023e6c76f248c6dc4f9bd45f2abde637eb8a034384135"} Oct 07 08:30:02 crc kubenswrapper[4875]: I1007 08:30:02.079025 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a" exitCode=0 Oct 07 08:30:02 crc kubenswrapper[4875]: I1007 08:30:02.079064 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a"} Oct 07 08:30:02 crc kubenswrapper[4875]: I1007 08:30:02.079088 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515"} Oct 07 08:30:02 crc kubenswrapper[4875]: I1007 08:30:02.079104 4875 scope.go:117] "RemoveContainer" containerID="e7a8b49c8a3c1b65fb51ee691d301a717bb3f37d230c7e4911b755a2a993aaef" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.465029 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.625752 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume\") pod \"b3acee46-ab06-4803-a585-7ddb2befb5ff\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.625982 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjczx\" (UniqueName: \"kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx\") pod \"b3acee46-ab06-4803-a585-7ddb2befb5ff\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.626007 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume\") pod \"b3acee46-ab06-4803-a585-7ddb2befb5ff\" (UID: \"b3acee46-ab06-4803-a585-7ddb2befb5ff\") " Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.626788 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume" (OuterVolumeSpecName: "config-volume") pod "b3acee46-ab06-4803-a585-7ddb2befb5ff" (UID: "b3acee46-ab06-4803-a585-7ddb2befb5ff"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.632949 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b3acee46-ab06-4803-a585-7ddb2befb5ff" (UID: "b3acee46-ab06-4803-a585-7ddb2befb5ff"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.634479 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx" (OuterVolumeSpecName: "kube-api-access-pjczx") pod "b3acee46-ab06-4803-a585-7ddb2befb5ff" (UID: "b3acee46-ab06-4803-a585-7ddb2befb5ff"). InnerVolumeSpecName "kube-api-access-pjczx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.727688 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjczx\" (UniqueName: \"kubernetes.io/projected/b3acee46-ab06-4803-a585-7ddb2befb5ff-kube-api-access-pjczx\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.727723 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3acee46-ab06-4803-a585-7ddb2befb5ff-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:03 crc kubenswrapper[4875]: I1007 08:30:03.727733 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3acee46-ab06-4803-a585-7ddb2befb5ff-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:04 crc kubenswrapper[4875]: I1007 08:30:04.100129 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" event={"ID":"b3acee46-ab06-4803-a585-7ddb2befb5ff","Type":"ContainerDied","Data":"0ea3145874801823a0b66f2332842fcb4a9c3d19276f2fe46ca44cf2e95f7fa3"} Oct 07 08:30:04 crc kubenswrapper[4875]: I1007 08:30:04.100434 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ea3145874801823a0b66f2332842fcb4a9c3d19276f2fe46ca44cf2e95f7fa3" Oct 07 08:30:04 crc kubenswrapper[4875]: I1007 08:30:04.100192 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330430-w48kh" Oct 07 08:30:04 crc kubenswrapper[4875]: I1007 08:30:04.546549 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5"] Oct 07 08:30:04 crc kubenswrapper[4875]: I1007 08:30:04.555555 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330385-lc8x5"] Oct 07 08:30:05 crc kubenswrapper[4875]: I1007 08:30:05.706853 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2da55ba3-88e7-4cda-8ad9-b5945e39e991" path="/var/lib/kubelet/pods/2da55ba3-88e7-4cda-8ad9-b5945e39e991/volumes" Oct 07 08:30:20 crc kubenswrapper[4875]: I1007 08:30:20.266341 4875 generic.go:334] "Generic (PLEG): container finished" podID="97901af5-a883-4d4f-acd8-9425772903a9" containerID="1c87f62434522b8808d759a5bdef8e8feeab45bb9c655809365cc213c87f1000" exitCode=0 Oct 07 08:30:20 crc kubenswrapper[4875]: I1007 08:30:20.266492 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" event={"ID":"97901af5-a883-4d4f-acd8-9425772903a9","Type":"ContainerDied","Data":"1c87f62434522b8808d759a5bdef8e8feeab45bb9c655809365cc213c87f1000"} Oct 07 08:30:20 crc kubenswrapper[4875]: I1007 08:30:20.458690 4875 scope.go:117] "RemoveContainer" containerID="bbdb2f9ebc9cb3dde8e0fb046436b4bacec8657642ffd439302fcc99b2579d3e" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.782957 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.921162 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key\") pod \"97901af5-a883-4d4f-acd8-9425772903a9\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.921600 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory\") pod \"97901af5-a883-4d4f-acd8-9425772903a9\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.921773 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct24w\" (UniqueName: \"kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w\") pod \"97901af5-a883-4d4f-acd8-9425772903a9\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.922293 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle\") pod \"97901af5-a883-4d4f-acd8-9425772903a9\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.922399 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0\") pod \"97901af5-a883-4d4f-acd8-9425772903a9\" (UID: \"97901af5-a883-4d4f-acd8-9425772903a9\") " Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.927425 
4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "97901af5-a883-4d4f-acd8-9425772903a9" (UID: "97901af5-a883-4d4f-acd8-9425772903a9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.928743 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w" (OuterVolumeSpecName: "kube-api-access-ct24w") pod "97901af5-a883-4d4f-acd8-9425772903a9" (UID: "97901af5-a883-4d4f-acd8-9425772903a9"). InnerVolumeSpecName "kube-api-access-ct24w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.948722 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "97901af5-a883-4d4f-acd8-9425772903a9" (UID: "97901af5-a883-4d4f-acd8-9425772903a9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.949502 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "97901af5-a883-4d4f-acd8-9425772903a9" (UID: "97901af5-a883-4d4f-acd8-9425772903a9"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:30:21 crc kubenswrapper[4875]: I1007 08:30:21.980803 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory" (OuterVolumeSpecName: "inventory") pod "97901af5-a883-4d4f-acd8-9425772903a9" (UID: "97901af5-a883-4d4f-acd8-9425772903a9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.025257 4875 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.025301 4875 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/97901af5-a883-4d4f-acd8-9425772903a9-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.025314 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.025328 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97901af5-a883-4d4f-acd8-9425772903a9-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.025339 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct24w\" (UniqueName: \"kubernetes.io/projected/97901af5-a883-4d4f-acd8-9425772903a9-kube-api-access-ct24w\") on node \"crc\" DevicePath \"\"" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.282991 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" event={"ID":"97901af5-a883-4d4f-acd8-9425772903a9","Type":"ContainerDied","Data":"0628658cb737f70ef9b0033656f13dd62d538ec057975be5ef38bd71dc843243"} Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.283033 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0628658cb737f70ef9b0033656f13dd62d538ec057975be5ef38bd71dc843243" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.283067 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrzjf" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.374904 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz"] Oct 07 08:30:22 crc kubenswrapper[4875]: E1007 08:30:22.375300 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97901af5-a883-4d4f-acd8-9425772903a9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.375315 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="97901af5-a883-4d4f-acd8-9425772903a9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 08:30:22 crc kubenswrapper[4875]: E1007 08:30:22.375357 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3acee46-ab06-4803-a585-7ddb2befb5ff" containerName="collect-profiles" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.375363 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3acee46-ab06-4803-a585-7ddb2befb5ff" containerName="collect-profiles" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.375529 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3acee46-ab06-4803-a585-7ddb2befb5ff" containerName="collect-profiles" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.375561 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="97901af5-a883-4d4f-acd8-9425772903a9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.376224 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.378417 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.378560 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.379066 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.381896 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.382353 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.382746 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.382946 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz"] Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.537957 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 
crc kubenswrapper[4875]: I1007 08:30:22.538362 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k7lg\" (UniqueName: \"kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.538494 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.538599 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.538756 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.538947 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.640773 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.640824 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.640926 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.641488 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.641601 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.641651 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k7lg\" (UniqueName: \"kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.645217 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.645244 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.646332 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.646478 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc 
kubenswrapper[4875]: I1007 08:30:22.647968 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.664693 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k7lg\" (UniqueName: \"kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:22 crc kubenswrapper[4875]: I1007 08:30:22.699806 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:30:23 crc kubenswrapper[4875]: I1007 08:30:23.222732 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz"] Oct 07 08:30:23 crc kubenswrapper[4875]: W1007 08:30:23.227309 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6df5d2d_85d8_4d79_b8a2_f3b6f7060019.slice/crio-c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308 WatchSource:0}: Error finding container c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308: Status 404 returned error can't find the container with id c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308 Oct 07 08:30:23 crc kubenswrapper[4875]: I1007 08:30:23.295615 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" event={"ID":"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019","Type":"ContainerStarted","Data":"c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308"} Oct 07 08:30:24 crc kubenswrapper[4875]: I1007 08:30:24.331772 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" event={"ID":"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019","Type":"ContainerStarted","Data":"49f73ba068360883586d17ec8f485b1022227deeaeb2758a182d7ffd35e8fbde"} Oct 07 08:30:24 crc kubenswrapper[4875]: I1007 08:30:24.354078 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" podStartSLOduration=1.9394135719999999 podStartE2EDuration="2.354059325s" podCreationTimestamp="2025-10-07 08:30:22 +0000 UTC" firstStartedPulling="2025-10-07 08:30:23.230918931 +0000 UTC m=+2048.190689474" lastFinishedPulling="2025-10-07 08:30:23.645564664 +0000 UTC m=+2048.605335227" observedRunningTime="2025-10-07 08:30:24.352741473 +0000 UTC m=+2049.312512046" watchObservedRunningTime="2025-10-07 08:30:24.354059325 +0000 UTC m=+2049.313829868" Oct 07 08:31:17 crc kubenswrapper[4875]: I1007 08:31:17.928281 4875 generic.go:334] "Generic (PLEG): container finished" podID="c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" containerID="49f73ba068360883586d17ec8f485b1022227deeaeb2758a182d7ffd35e8fbde" exitCode=0 Oct 07 08:31:17 crc kubenswrapper[4875]: I1007 08:31:17.928431 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" event={"ID":"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019","Type":"ContainerDied","Data":"49f73ba068360883586d17ec8f485b1022227deeaeb2758a182d7ffd35e8fbde"} Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.426601 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575254 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575391 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k7lg\" (UniqueName: \"kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575504 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575608 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575643 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.575741 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0\") pod \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\" (UID: \"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019\") " Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.581762 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.582681 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg" (OuterVolumeSpecName: "kube-api-access-5k7lg") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). 
InnerVolumeSpecName "kube-api-access-5k7lg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.602843 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.603582 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.604911 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.605279 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory" (OuterVolumeSpecName: "inventory") pod "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" (UID: "c6df5d2d-85d8-4d79-b8a2-f3b6f7060019"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678189 4875 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678231 4875 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678247 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k7lg\" (UniqueName: \"kubernetes.io/projected/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-kube-api-access-5k7lg\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678261 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678271 4875 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.678282 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6df5d2d-85d8-4d79-b8a2-f3b6f7060019-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.944474 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" event={"ID":"c6df5d2d-85d8-4d79-b8a2-f3b6f7060019","Type":"ContainerDied","Data":"c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308"} Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.945344 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1632ea4ed04d9d669f5d928054b128d4b01ee666d4beb7a0a1865b4451e9308" Oct 07 08:31:19 crc kubenswrapper[4875]: I1007 08:31:19.944801 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.047213 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w"] Oct 07 08:31:20 crc kubenswrapper[4875]: E1007 08:31:20.048010 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.048043 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.048403 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6df5d2d-85d8-4d79-b8a2-f3b6f7060019" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.049701 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.052419 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.053927 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.057913 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w"] Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.066924 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.067154 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.070032 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.187947 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.188039 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.188084 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.188173 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ph54\" (UniqueName: \"kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.188241 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.289600 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.289681 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.289749 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ph54\" (UniqueName: \"kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.289789 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.289831 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.294552 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.296321 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.296725 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.304825 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.308197 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ph54\" (UniqueName: \"kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-69n9w\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.381385 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.897268 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w"] Oct 07 08:31:20 crc kubenswrapper[4875]: I1007 08:31:20.954664 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" event={"ID":"3ec6c99f-4455-40ea-8a27-bf56298f3e17","Type":"ContainerStarted","Data":"373d38bb03e93c641a19f06718c100ee6bdd207be70e2ed1573163ed12e88a99"} Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.018042 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.021287 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.036224 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.126668 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.126870 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.126905 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5tl9\" (UniqueName: \"kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.228126 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.228454 
4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5tl9\" (UniqueName: \"kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.228521 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.229093 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.229174 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.250939 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5tl9\" (UniqueName: \"kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9\") pod \"redhat-marketplace-l5vjd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.359508 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.845899 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:22 crc kubenswrapper[4875]: W1007 08:31:22.852768 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85d403c8_c331_4ec9_87e0_c830342e4abd.slice/crio-844cd777c064d9a51ed12219a3782f376c4e27ed4d03a74b1663ef993dbbe1c1 WatchSource:0}: Error finding container 844cd777c064d9a51ed12219a3782f376c4e27ed4d03a74b1663ef993dbbe1c1: Status 404 returned error can't find the container with id 844cd777c064d9a51ed12219a3782f376c4e27ed4d03a74b1663ef993dbbe1c1 Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.978833 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerStarted","Data":"844cd777c064d9a51ed12219a3782f376c4e27ed4d03a74b1663ef993dbbe1c1"} Oct 07 08:31:22 crc kubenswrapper[4875]: I1007 08:31:22.980612 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" event={"ID":"3ec6c99f-4455-40ea-8a27-bf56298f3e17","Type":"ContainerStarted","Data":"ed817b11eec979fcd4ec67f6e9632ac34e5f7ad39d622eb4e1da1fb701a3599f"} Oct 07 08:31:23 crc kubenswrapper[4875]: I1007 08:31:22.999770 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" podStartSLOduration=1.6688251859999998 podStartE2EDuration="2.999749673s" podCreationTimestamp="2025-10-07 08:31:20 +0000 UTC" firstStartedPulling="2025-10-07 08:31:20.902696934 +0000 UTC m=+2105.862467477" lastFinishedPulling="2025-10-07 08:31:22.233621421 +0000 UTC m=+2107.193391964" observedRunningTime="2025-10-07 08:31:22.99524663 +0000 UTC m=+2107.955017183" watchObservedRunningTime="2025-10-07 08:31:22.999749673 +0000 UTC m=+2107.959520216" Oct 07 08:31:23 crc kubenswrapper[4875]: I1007 08:31:23.991238 4875 generic.go:334] "Generic (PLEG): container finished" podID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerID="fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22" exitCode=0 Oct 07 08:31:23 crc kubenswrapper[4875]: I1007 08:31:23.991296 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerDied","Data":"fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22"} Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.011008 4875 generic.go:334] "Generic (PLEG): container finished" podID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerID="766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8" exitCode=0 Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.011070 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerDied","Data":"766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8"} Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.641022 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.646491 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.681467 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.745982 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.746190 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpnjl\" (UniqueName: \"kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.746346 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.848006 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.848142 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpnjl\" (UniqueName: \"kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.848626 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.848739 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.849176 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:26 crc kubenswrapper[4875]: I1007 08:31:26.878166 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vpnjl\" (UniqueName: \"kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl\") pod \"community-operators-qzxss\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:27 crc kubenswrapper[4875]: I1007 08:31:27.004942 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:27 crc kubenswrapper[4875]: I1007 08:31:27.050013 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerStarted","Data":"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1"} Oct 07 08:31:27 crc kubenswrapper[4875]: I1007 08:31:27.085123 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l5vjd" podStartSLOduration=3.538229978 podStartE2EDuration="6.085089512s" podCreationTimestamp="2025-10-07 08:31:21 +0000 UTC" firstStartedPulling="2025-10-07 08:31:23.993554827 +0000 UTC m=+2108.953325370" lastFinishedPulling="2025-10-07 08:31:26.540414361 +0000 UTC m=+2111.500184904" observedRunningTime="2025-10-07 08:31:27.071706545 +0000 UTC m=+2112.031477108" watchObservedRunningTime="2025-10-07 08:31:27.085089512 +0000 UTC m=+2112.044860055" Oct 07 08:31:27 crc kubenswrapper[4875]: I1007 08:31:27.547944 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:28 crc kubenswrapper[4875]: I1007 08:31:28.059521 4875 generic.go:334] "Generic (PLEG): container finished" podID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerID="5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60" exitCode=0 Oct 07 08:31:28 crc kubenswrapper[4875]: I1007 08:31:28.059592 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerDied","Data":"5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60"} Oct 07 08:31:28 crc kubenswrapper[4875]: I1007 08:31:28.059634 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerStarted","Data":"bd22a939a1b4a1632c0b9e8c8deab79ca4f59bb417468be8ad6be676bfe5294e"} Oct 07 08:31:29 crc kubenswrapper[4875]: I1007 08:31:29.068540 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerStarted","Data":"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e"} Oct 07 08:31:30 crc kubenswrapper[4875]: I1007 08:31:30.083799 4875 generic.go:334] "Generic (PLEG): container finished" podID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerID="4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e" exitCode=0 Oct 07 08:31:30 crc kubenswrapper[4875]: I1007 08:31:30.083906 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerDied","Data":"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e"} Oct 07 08:31:31 crc kubenswrapper[4875]: I1007 08:31:31.100206 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerStarted","Data":"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2"} Oct 07 08:31:31 crc kubenswrapper[4875]: I1007 08:31:31.124058 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qzxss" podStartSLOduration=2.678504522 podStartE2EDuration="5.12404034s" podCreationTimestamp="2025-10-07 08:31:26 +0000 UTC" firstStartedPulling="2025-10-07 08:31:28.06181089 +0000 UTC m=+2113.021581443" lastFinishedPulling="2025-10-07 08:31:30.507346718 +0000 UTC m=+2115.467117261" observedRunningTime="2025-10-07 08:31:31.118555954 +0000 UTC m=+2116.078326497" watchObservedRunningTime="2025-10-07 08:31:31.12404034 +0000 UTC m=+2116.083810883" Oct 07 08:31:32 crc kubenswrapper[4875]: I1007 08:31:32.360314 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:32 crc kubenswrapper[4875]: I1007 08:31:32.360619 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:32 crc kubenswrapper[4875]: I1007 08:31:32.409483 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:33 crc kubenswrapper[4875]: I1007 08:31:33.159135 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:33 crc kubenswrapper[4875]: I1007 08:31:33.611557 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.135158 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l5vjd" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="registry-server" containerID="cri-o://0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1" gracePeriod=2 Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.642332 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.734023 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5tl9\" (UniqueName: \"kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9\") pod \"85d403c8-c331-4ec9-87e0-c830342e4abd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.734427 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities\") pod \"85d403c8-c331-4ec9-87e0-c830342e4abd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.734667 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content\") pod \"85d403c8-c331-4ec9-87e0-c830342e4abd\" (UID: \"85d403c8-c331-4ec9-87e0-c830342e4abd\") " Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.736569 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities" (OuterVolumeSpecName: "utilities") pod "85d403c8-c331-4ec9-87e0-c830342e4abd" (UID: "85d403c8-c331-4ec9-87e0-c830342e4abd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.743953 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9" (OuterVolumeSpecName: "kube-api-access-t5tl9") pod "85d403c8-c331-4ec9-87e0-c830342e4abd" (UID: "85d403c8-c331-4ec9-87e0-c830342e4abd"). InnerVolumeSpecName "kube-api-access-t5tl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.752534 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85d403c8-c331-4ec9-87e0-c830342e4abd" (UID: "85d403c8-c331-4ec9-87e0-c830342e4abd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.838687 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5tl9\" (UniqueName: \"kubernetes.io/projected/85d403c8-c331-4ec9-87e0-c830342e4abd-kube-api-access-t5tl9\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.838736 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:35 crc kubenswrapper[4875]: I1007 08:31:35.838752 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d403c8-c331-4ec9-87e0-c830342e4abd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.148042 4875 generic.go:334] "Generic (PLEG): container finished" podID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerID="0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1" exitCode=0 Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.148086 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerDied","Data":"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1"} Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.148113 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vjd" event={"ID":"85d403c8-c331-4ec9-87e0-c830342e4abd","Type":"ContainerDied","Data":"844cd777c064d9a51ed12219a3782f376c4e27ed4d03a74b1663ef993dbbe1c1"} Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.148126 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vjd" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.148135 4875 scope.go:117] "RemoveContainer" containerID="0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.186129 4875 scope.go:117] "RemoveContainer" containerID="766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.206886 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.215745 4875 scope.go:117] "RemoveContainer" containerID="fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.222268 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vjd"] Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.276322 4875 scope.go:117] "RemoveContainer" containerID="0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1" Oct 07 08:31:36 crc kubenswrapper[4875]: E1007 08:31:36.276707 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1\": container with ID starting with 0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1 not found: ID does not exist" containerID="0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.276757 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1"} err="failed to get container status \"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1\": rpc error: code = NotFound desc = could not find container \"0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1\": container with ID starting with 0cedf6ff90d0de3ae59941f95b5259febb708ec3c3b6d3f249f4239af535c9a1 not found: ID does not exist" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.276789 4875 scope.go:117] "RemoveContainer" containerID="766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8" Oct 07 08:31:36 crc kubenswrapper[4875]: E1007 08:31:36.277208 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8\": container with ID starting with 766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8 not found: ID does not exist" containerID="766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.277251 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8"} err="failed to get container status \"766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8\": rpc error: code = NotFound desc = could not find container \"766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8\": container with ID starting with 766f7614a9210ad4372656ae4f54c96ecbeae0918b6d6c2ae73a882daa15e6d8 not found: ID does not exist" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.277274 4875 scope.go:117] "RemoveContainer" 
containerID="fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22" Oct 07 08:31:36 crc kubenswrapper[4875]: E1007 08:31:36.278241 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22\": container with ID starting with fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22 not found: ID does not exist" containerID="fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22" Oct 07 08:31:36 crc kubenswrapper[4875]: I1007 08:31:36.278310 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22"} err="failed to get container status \"fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22\": rpc error: code = NotFound desc = could not find container \"fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22\": container with ID starting with fe509ba0c84727eccc17b4aa49d33370949ac819629cec3221a90ddf22378a22 not found: ID does not exist" Oct 07 08:31:37 crc kubenswrapper[4875]: I1007 08:31:37.005834 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:37 crc kubenswrapper[4875]: I1007 08:31:37.006379 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:37 crc kubenswrapper[4875]: I1007 08:31:37.060312 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:37 crc kubenswrapper[4875]: I1007 08:31:37.219857 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:37 crc kubenswrapper[4875]: I1007 08:31:37.711698 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" path="/var/lib/kubelet/pods/85d403c8-c331-4ec9-87e0-c830342e4abd/volumes" Oct 07 08:31:38 crc kubenswrapper[4875]: I1007 08:31:38.999827 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.182582 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qzxss" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="registry-server" containerID="cri-o://7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2" gracePeriod=2 Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.659543 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.720305 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities\") pod \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.720375 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content\") pod \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.720428 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpnjl\" (UniqueName: \"kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl\") pod \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\" (UID: \"bd8d99ff-743d-4b0e-a709-94d6dc6861ec\") " Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.721280 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities" (OuterVolumeSpecName: "utilities") pod "bd8d99ff-743d-4b0e-a709-94d6dc6861ec" (UID: "bd8d99ff-743d-4b0e-a709-94d6dc6861ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.729777 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl" (OuterVolumeSpecName: "kube-api-access-vpnjl") pod "bd8d99ff-743d-4b0e-a709-94d6dc6861ec" (UID: "bd8d99ff-743d-4b0e-a709-94d6dc6861ec"). InnerVolumeSpecName "kube-api-access-vpnjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.768462 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd8d99ff-743d-4b0e-a709-94d6dc6861ec" (UID: "bd8d99ff-743d-4b0e-a709-94d6dc6861ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.823387 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.823429 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:39 crc kubenswrapper[4875]: I1007 08:31:39.823442 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpnjl\" (UniqueName: \"kubernetes.io/projected/bd8d99ff-743d-4b0e-a709-94d6dc6861ec-kube-api-access-vpnjl\") on node \"crc\" DevicePath \"\"" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.193163 4875 generic.go:334] "Generic (PLEG): container finished" podID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerID="7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2" exitCode=0 Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.193234 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzxss" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.193224 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerDied","Data":"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2"} Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.193347 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzxss" event={"ID":"bd8d99ff-743d-4b0e-a709-94d6dc6861ec","Type":"ContainerDied","Data":"bd22a939a1b4a1632c0b9e8c8deab79ca4f59bb417468be8ad6be676bfe5294e"} Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.193377 4875 scope.go:117] "RemoveContainer" containerID="7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.221122 4875 scope.go:117] "RemoveContainer" containerID="4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.241702 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.256785 4875 scope.go:117] "RemoveContainer" containerID="5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.257977 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qzxss"] Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.311848 4875 scope.go:117] "RemoveContainer" containerID="7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2" Oct 07 08:31:40 crc kubenswrapper[4875]: E1007 08:31:40.312411 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2\": container with ID starting with 7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2 not found: ID does not exist" containerID="7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.312450 
4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2"} err="failed to get container status \"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2\": rpc error: code = NotFound desc = could not find container \"7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2\": container with ID starting with 7f3bb0307ca211e163d251b760042ee45a5cbf01b46ec3c07fe42f3f3b958ce2 not found: ID does not exist" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.312492 4875 scope.go:117] "RemoveContainer" containerID="4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e" Oct 07 08:31:40 crc kubenswrapper[4875]: E1007 08:31:40.312724 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e\": container with ID starting with 4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e not found: ID does not exist" containerID="4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.312752 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e"} err="failed to get container status \"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e\": rpc error: code = NotFound desc = could not find container \"4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e\": container with ID starting with 4834a8a14af5f33c8407aa2d274a82d2ce43b789cd3a395b19da4c719a84379e not found: ID does not exist" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.312773 4875 scope.go:117] "RemoveContainer" containerID="5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60" Oct 07 08:31:40 crc kubenswrapper[4875]: E1007 08:31:40.313548 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60\": container with ID starting with 5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60 not found: ID does not exist" containerID="5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60" Oct 07 08:31:40 crc kubenswrapper[4875]: I1007 08:31:40.313582 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60"} err="failed to get container status \"5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60\": rpc error: code = NotFound desc = could not find container \"5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60\": container with ID starting with 5c5281fadf544f4f1ac8a4edd85842634b9580b4a637336ddc054e10320bac60 not found: ID does not exist" Oct 07 08:31:41 crc kubenswrapper[4875]: I1007 08:31:41.710468 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" path="/var/lib/kubelet/pods/bd8d99ff-743d-4b0e-a709-94d6dc6861ec/volumes" Oct 07 08:32:01 crc kubenswrapper[4875]: I1007 08:32:01.221090 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:32:01 crc kubenswrapper[4875]: I1007 08:32:01.221608 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.333895 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.336159 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.336260 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.336330 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="extract-content" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.336396 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="extract-content" Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.336462 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="extract-utilities" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.336542 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="extract-utilities" Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.336641 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="extract-utilities" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.336730 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="extract-utilities" Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.336833 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="extract-content" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.337388 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="extract-content" Oct 07 08:32:15 crc kubenswrapper[4875]: E1007 08:32:15.337497 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.337586 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.338040 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd8d99ff-743d-4b0e-a709-94d6dc6861ec" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.338183 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="85d403c8-c331-4ec9-87e0-c830342e4abd" containerName="registry-server" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.340219 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.347613 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.455908 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.456267 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.456300 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lvnx\" (UniqueName: \"kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.558492 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.558546 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.558577 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lvnx\" (UniqueName: \"kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.559164 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.559210 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.583797 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6lvnx\" (UniqueName: \"kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx\") pod \"certified-operators-ttx5b\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:15 crc kubenswrapper[4875]: I1007 08:32:15.669060 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:16 crc kubenswrapper[4875]: I1007 08:32:16.170982 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:16 crc kubenswrapper[4875]: I1007 08:32:16.507748 4875 generic.go:334] "Generic (PLEG): container finished" podID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerID="c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616" exitCode=0 Oct 07 08:32:16 crc kubenswrapper[4875]: I1007 08:32:16.507923 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerDied","Data":"c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616"} Oct 07 08:32:16 crc kubenswrapper[4875]: I1007 08:32:16.508104 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerStarted","Data":"8c7fc23c599390b4888ebbaf20d82a71715de74ad31cffe621d01cdd142621f1"} Oct 07 08:32:17 crc kubenswrapper[4875]: I1007 08:32:17.520164 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerStarted","Data":"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e"} Oct 07 08:32:18 crc kubenswrapper[4875]: I1007 08:32:18.532496 4875 generic.go:334] "Generic (PLEG): container finished" podID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerID="b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e" exitCode=0 Oct 07 08:32:18 crc kubenswrapper[4875]: I1007 08:32:18.532593 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerDied","Data":"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e"} Oct 07 08:32:19 crc kubenswrapper[4875]: I1007 08:32:19.546923 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerStarted","Data":"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69"} Oct 07 08:32:19 crc kubenswrapper[4875]: I1007 08:32:19.578961 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ttx5b" podStartSLOduration=2.114952701 podStartE2EDuration="4.578935258s" podCreationTimestamp="2025-10-07 08:32:15 +0000 UTC" firstStartedPulling="2025-10-07 08:32:16.50926367 +0000 UTC m=+2161.469034213" lastFinishedPulling="2025-10-07 08:32:18.973246207 +0000 UTC m=+2163.933016770" observedRunningTime="2025-10-07 08:32:19.573482713 +0000 UTC m=+2164.533253286" watchObservedRunningTime="2025-10-07 08:32:19.578935258 +0000 UTC m=+2164.538705811" Oct 07 08:32:25 crc kubenswrapper[4875]: I1007 08:32:25.670300 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:25 crc kubenswrapper[4875]: I1007 08:32:25.670659 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:25 crc kubenswrapper[4875]: I1007 08:32:25.713011 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:26 crc kubenswrapper[4875]: I1007 08:32:26.671640 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:26 crc kubenswrapper[4875]: I1007 08:32:26.726368 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:28 crc kubenswrapper[4875]: I1007 08:32:28.641171 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ttx5b" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="registry-server" containerID="cri-o://52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69" gracePeriod=2 Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.100083 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.219319 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content\") pod \"ff44bd1b-cb7c-4262-813c-565a3c520591\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.219511 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lvnx\" (UniqueName: \"kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx\") pod \"ff44bd1b-cb7c-4262-813c-565a3c520591\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.219565 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities\") pod \"ff44bd1b-cb7c-4262-813c-565a3c520591\" (UID: \"ff44bd1b-cb7c-4262-813c-565a3c520591\") " Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.220761 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities" (OuterVolumeSpecName: "utilities") pod "ff44bd1b-cb7c-4262-813c-565a3c520591" (UID: "ff44bd1b-cb7c-4262-813c-565a3c520591"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.234222 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx" (OuterVolumeSpecName: "kube-api-access-6lvnx") pod "ff44bd1b-cb7c-4262-813c-565a3c520591" (UID: "ff44bd1b-cb7c-4262-813c-565a3c520591"). InnerVolumeSpecName "kube-api-access-6lvnx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.277796 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff44bd1b-cb7c-4262-813c-565a3c520591" (UID: "ff44bd1b-cb7c-4262-813c-565a3c520591"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.322978 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.323032 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff44bd1b-cb7c-4262-813c-565a3c520591-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.323051 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lvnx\" (UniqueName: \"kubernetes.io/projected/ff44bd1b-cb7c-4262-813c-565a3c520591-kube-api-access-6lvnx\") on node \"crc\" DevicePath \"\"" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.655612 4875 generic.go:334] "Generic (PLEG): container finished" podID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerID="52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69" exitCode=0 Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.655867 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerDied","Data":"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69"} Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.657519 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ttx5b" event={"ID":"ff44bd1b-cb7c-4262-813c-565a3c520591","Type":"ContainerDied","Data":"8c7fc23c599390b4888ebbaf20d82a71715de74ad31cffe621d01cdd142621f1"} Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.657646 4875 scope.go:117] "RemoveContainer" containerID="52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.656011 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ttx5b" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.691703 4875 scope.go:117] "RemoveContainer" containerID="b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.721414 4875 scope.go:117] "RemoveContainer" containerID="c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.734410 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.734463 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ttx5b"] Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.777328 4875 scope.go:117] "RemoveContainer" containerID="52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69" Oct 07 08:32:29 crc kubenswrapper[4875]: E1007 08:32:29.777776 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69\": container with ID starting with 52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69 not found: ID does not exist" containerID="52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.777831 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69"} err="failed to get container status \"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69\": rpc error: code = NotFound desc = could not find container \"52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69\": container with ID starting with 52178c76e1a59bf89da0d3d5f89d16d9bb402755221216ed89af33c2fd1acc69 not found: ID does not exist" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.777861 4875 scope.go:117] "RemoveContainer" containerID="b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e" Oct 07 08:32:29 crc kubenswrapper[4875]: E1007 08:32:29.778237 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e\": container with ID starting with b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e not found: ID does not exist" containerID="b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.778280 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e"} err="failed to get container status \"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e\": rpc error: code = NotFound desc = could not find container \"b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e\": container with ID starting with b0eb2d36355533a1ab66fac90e0168e9d83d748ca69a6d92baa963225134b13e not found: ID does not exist" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.778311 4875 scope.go:117] "RemoveContainer" containerID="c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616" Oct 07 08:32:29 crc kubenswrapper[4875]: E1007 08:32:29.778580 4875 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616\": container with ID starting with c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616 not found: ID does not exist" containerID="c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616" Oct 07 08:32:29 crc kubenswrapper[4875]: I1007 08:32:29.778626 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616"} err="failed to get container status \"c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616\": rpc error: code = NotFound desc = could not find container \"c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616\": container with ID starting with c9f8f7728fe17f05624461edb8189e7e889709a3ddcec5339b1ef9401fcb7616 not found: ID does not exist" Oct 07 08:32:31 crc kubenswrapper[4875]: I1007 08:32:31.221611 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:32:31 crc kubenswrapper[4875]: I1007 08:32:31.221987 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:32:31 crc kubenswrapper[4875]: I1007 08:32:31.707239 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" path="/var/lib/kubelet/pods/ff44bd1b-cb7c-4262-813c-565a3c520591/volumes" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.221261 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.221958 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.222028 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.223114 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.223198 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" 
podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" gracePeriod=600 Oct 07 08:33:01 crc kubenswrapper[4875]: E1007 08:33:01.348428 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.937187 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" exitCode=0 Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.937228 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515"} Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.937260 4875 scope.go:117] "RemoveContainer" containerID="d9995d2c34b022ebc0e71bdaef75f97ecc970ab076a5c2eb0143ce90dc857f1a" Oct 07 08:33:01 crc kubenswrapper[4875]: I1007 08:33:01.938126 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:33:01 crc kubenswrapper[4875]: E1007 08:33:01.938612 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:33:14 crc kubenswrapper[4875]: I1007 08:33:14.698169 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:33:14 crc kubenswrapper[4875]: E1007 08:33:14.699199 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:33:25 crc kubenswrapper[4875]: I1007 08:33:25.704142 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:33:25 crc kubenswrapper[4875]: E1007 08:33:25.705030 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:33:40 crc kubenswrapper[4875]: I1007 08:33:40.698130 4875 scope.go:117] 
"RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:33:40 crc kubenswrapper[4875]: E1007 08:33:40.700345 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:33:52 crc kubenswrapper[4875]: I1007 08:33:52.698121 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:33:52 crc kubenswrapper[4875]: E1007 08:33:52.700186 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:34:03 crc kubenswrapper[4875]: I1007 08:34:03.699108 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:34:03 crc kubenswrapper[4875]: E1007 08:34:03.701076 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:34:15 crc kubenswrapper[4875]: I1007 08:34:15.705725 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:34:15 crc kubenswrapper[4875]: E1007 08:34:15.706532 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:34:27 crc kubenswrapper[4875]: I1007 08:34:27.697372 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:34:27 crc kubenswrapper[4875]: E1007 08:34:27.698169 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:34:41 crc kubenswrapper[4875]: I1007 08:34:41.698338 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:34:41 crc kubenswrapper[4875]: E1007 08:34:41.699113 4875 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:34:54 crc kubenswrapper[4875]: I1007 08:34:54.697977 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:34:54 crc kubenswrapper[4875]: E1007 08:34:54.698912 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:35:07 crc kubenswrapper[4875]: I1007 08:35:07.697888 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:35:07 crc kubenswrapper[4875]: E1007 08:35:07.698643 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:35:18 crc kubenswrapper[4875]: I1007 08:35:18.697091 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:35:18 crc kubenswrapper[4875]: E1007 08:35:18.697981 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:35:31 crc kubenswrapper[4875]: I1007 08:35:31.698044 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:35:31 crc kubenswrapper[4875]: E1007 08:35:31.698920 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:35:42 crc kubenswrapper[4875]: I1007 08:35:42.698111 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:35:42 crc kubenswrapper[4875]: E1007 08:35:42.699017 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:35:56 crc kubenswrapper[4875]: I1007 08:35:56.698004 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:35:56 crc kubenswrapper[4875]: E1007 08:35:56.698780 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:36:05 crc kubenswrapper[4875]: I1007 08:36:05.658189 4875 generic.go:334] "Generic (PLEG): container finished" podID="3ec6c99f-4455-40ea-8a27-bf56298f3e17" containerID="ed817b11eec979fcd4ec67f6e9632ac34e5f7ad39d622eb4e1da1fb701a3599f" exitCode=0 Oct 07 08:36:05 crc kubenswrapper[4875]: I1007 08:36:05.658266 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" event={"ID":"3ec6c99f-4455-40ea-8a27-bf56298f3e17","Type":"ContainerDied","Data":"ed817b11eec979fcd4ec67f6e9632ac34e5f7ad39d622eb4e1da1fb701a3599f"} Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.123250 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.277745 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle\") pod \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.277797 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ph54\" (UniqueName: \"kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54\") pod \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.277899 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key\") pod \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.277946 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0\") pod \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.278076 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory\") pod \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\" (UID: \"3ec6c99f-4455-40ea-8a27-bf56298f3e17\") " Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.286041 4875 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3ec6c99f-4455-40ea-8a27-bf56298f3e17" (UID: "3ec6c99f-4455-40ea-8a27-bf56298f3e17"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.286114 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54" (OuterVolumeSpecName: "kube-api-access-9ph54") pod "3ec6c99f-4455-40ea-8a27-bf56298f3e17" (UID: "3ec6c99f-4455-40ea-8a27-bf56298f3e17"). InnerVolumeSpecName "kube-api-access-9ph54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.305365 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "3ec6c99f-4455-40ea-8a27-bf56298f3e17" (UID: "3ec6c99f-4455-40ea-8a27-bf56298f3e17"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.307222 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory" (OuterVolumeSpecName: "inventory") pod "3ec6c99f-4455-40ea-8a27-bf56298f3e17" (UID: "3ec6c99f-4455-40ea-8a27-bf56298f3e17"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.307629 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3ec6c99f-4455-40ea-8a27-bf56298f3e17" (UID: "3ec6c99f-4455-40ea-8a27-bf56298f3e17"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.380755 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.380980 4875 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.381079 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ph54\" (UniqueName: \"kubernetes.io/projected/3ec6c99f-4455-40ea-8a27-bf56298f3e17-kube-api-access-9ph54\") on node \"crc\" DevicePath \"\"" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.381150 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.381205 4875 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3ec6c99f-4455-40ea-8a27-bf56298f3e17-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.677316 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" event={"ID":"3ec6c99f-4455-40ea-8a27-bf56298f3e17","Type":"ContainerDied","Data":"373d38bb03e93c641a19f06718c100ee6bdd207be70e2ed1573163ed12e88a99"} Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.677364 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="373d38bb03e93c641a19f06718c100ee6bdd207be70e2ed1573163ed12e88a99" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.677388 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-69n9w" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770072 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n"] Oct 07 08:36:07 crc kubenswrapper[4875]: E1007 08:36:07.770508 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="extract-content" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770531 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="extract-content" Oct 07 08:36:07 crc kubenswrapper[4875]: E1007 08:36:07.770557 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="registry-server" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770566 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="registry-server" Oct 07 08:36:07 crc kubenswrapper[4875]: E1007 08:36:07.770586 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec6c99f-4455-40ea-8a27-bf56298f3e17" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770595 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec6c99f-4455-40ea-8a27-bf56298f3e17" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 08:36:07 crc kubenswrapper[4875]: E1007 08:36:07.770621 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="extract-utilities" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770630 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="extract-utilities" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770837 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec6c99f-4455-40ea-8a27-bf56298f3e17" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.770894 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff44bd1b-cb7c-4262-813c-565a3c520591" containerName="registry-server" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.771509 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.774871 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.781803 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.782058 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.782304 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.782457 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.782619 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.782800 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.784072 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n"] Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891349 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891425 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891493 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891515 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891593 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2q5z\" (UniqueName: 
\"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891637 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891682 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891739 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.891792 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.992926 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993013 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993040 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993085 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2q5z\" (UniqueName: 
\"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993117 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993157 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993218 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993280 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.993314 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.994307 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.997688 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.997956 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:07 crc kubenswrapper[4875]: I1007 08:36:07.998766 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.005318 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.006114 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.006149 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.009445 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.010656 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2q5z\" (UniqueName: \"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmx8n\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.089538 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.634632 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n"] Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.638634 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:36:08 crc kubenswrapper[4875]: I1007 08:36:08.692506 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" event={"ID":"1e54ad8c-e627-4e27-90d1-ea193eb2f42f","Type":"ContainerStarted","Data":"3f0da03b6b841fdc31b5f095de6190069a225e1be1402a807878f04b3c9b7696"} Oct 07 08:36:09 crc kubenswrapper[4875]: I1007 08:36:09.698065 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:36:09 crc kubenswrapper[4875]: E1007 08:36:09.699900 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:36:10 crc kubenswrapper[4875]: I1007 08:36:10.717778 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" event={"ID":"1e54ad8c-e627-4e27-90d1-ea193eb2f42f","Type":"ContainerStarted","Data":"26067902d513c25cdc0802b8bf651613c35200fce88fc3a366a4527ec03ce462"} Oct 07 08:36:10 crc kubenswrapper[4875]: I1007 08:36:10.739831 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" podStartSLOduration=2.669903955 podStartE2EDuration="3.739804047s" podCreationTimestamp="2025-10-07 08:36:07 +0000 UTC" firstStartedPulling="2025-10-07 08:36:08.638335119 +0000 UTC m=+2393.598105662" lastFinishedPulling="2025-10-07 08:36:09.708235211 +0000 UTC m=+2394.668005754" observedRunningTime="2025-10-07 08:36:10.735709676 +0000 UTC m=+2395.695480229" watchObservedRunningTime="2025-10-07 08:36:10.739804047 +0000 UTC m=+2395.699574590" Oct 07 08:36:20 crc kubenswrapper[4875]: I1007 08:36:20.697774 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:36:20 crc kubenswrapper[4875]: E1007 08:36:20.698606 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:36:34 crc kubenswrapper[4875]: I1007 08:36:34.698469 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:36:34 crc kubenswrapper[4875]: E1007 08:36:34.701204 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:36:47 crc kubenswrapper[4875]: I1007 08:36:47.698135 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:36:47 crc kubenswrapper[4875]: E1007 08:36:47.701301 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:36:59 crc kubenswrapper[4875]: I1007 08:36:59.698577 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:36:59 crc kubenswrapper[4875]: E1007 08:36:59.699608 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:37:11 crc kubenswrapper[4875]: I1007 08:37:11.698290 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:37:11 crc kubenswrapper[4875]: E1007 08:37:11.699237 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:37:24 crc kubenswrapper[4875]: I1007 08:37:24.697852 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:37:24 crc kubenswrapper[4875]: E1007 08:37:24.698799 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.183995 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.186627 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.210560 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.301982 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.302341 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.302753 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2djqr\" (UniqueName: \"kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.404464 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.404574 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2djqr\" (UniqueName: \"kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.404648 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.405234 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.405255 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.439805 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2djqr\" (UniqueName: \"kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr\") pod \"redhat-operators-m8r57\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:29 crc kubenswrapper[4875]: I1007 08:37:29.528770 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:30 crc kubenswrapper[4875]: I1007 08:37:30.052541 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:30 crc kubenswrapper[4875]: I1007 08:37:30.452484 4875 generic.go:334] "Generic (PLEG): container finished" podID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerID="857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e" exitCode=0 Oct 07 08:37:30 crc kubenswrapper[4875]: I1007 08:37:30.452545 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerDied","Data":"857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e"} Oct 07 08:37:30 crc kubenswrapper[4875]: I1007 08:37:30.452903 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerStarted","Data":"de8e188251791a5173fd0e60c4e68452a23ac632c0db4ec657d499e4170d4b7a"} Oct 07 08:37:31 crc kubenswrapper[4875]: I1007 08:37:31.462945 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerStarted","Data":"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8"} Oct 07 08:37:32 crc kubenswrapper[4875]: I1007 08:37:32.478000 4875 generic.go:334] "Generic (PLEG): container finished" podID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerID="e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8" exitCode=0 Oct 07 08:37:32 crc kubenswrapper[4875]: I1007 08:37:32.478050 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerDied","Data":"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8"} Oct 07 08:37:33 crc kubenswrapper[4875]: I1007 08:37:33.491785 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerStarted","Data":"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3"} Oct 07 08:37:33 crc kubenswrapper[4875]: I1007 08:37:33.517226 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m8r57" podStartSLOduration=1.9425050160000001 podStartE2EDuration="4.517202686s" podCreationTimestamp="2025-10-07 08:37:29 +0000 UTC" firstStartedPulling="2025-10-07 08:37:30.454620974 +0000 UTC m=+2475.414391517" lastFinishedPulling="2025-10-07 08:37:33.029318604 +0000 UTC m=+2477.989089187" observedRunningTime="2025-10-07 08:37:33.511306808 +0000 UTC m=+2478.471077371" watchObservedRunningTime="2025-10-07 08:37:33.517202686 +0000 UTC m=+2478.476973229" Oct 07 08:37:39 crc kubenswrapper[4875]: I1007 08:37:39.529474 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:39 crc kubenswrapper[4875]: I1007 08:37:39.530586 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:39 crc kubenswrapper[4875]: I1007 08:37:39.572939 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:39 crc kubenswrapper[4875]: I1007 08:37:39.697670 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:37:39 crc kubenswrapper[4875]: E1007 08:37:39.698313 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:37:40 crc kubenswrapper[4875]: I1007 08:37:40.589706 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:40 crc kubenswrapper[4875]: I1007 08:37:40.635808 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:42 crc kubenswrapper[4875]: I1007 08:37:42.566223 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m8r57" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="registry-server" containerID="cri-o://656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3" gracePeriod=2 Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.190739 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.271064 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities\") pod \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.271283 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2djqr\" (UniqueName: \"kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr\") pod \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.271356 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content\") pod \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\" (UID: \"f06fae8e-6823-49f6-b016-c98e4f3dabd6\") " Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.272970 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities" (OuterVolumeSpecName: "utilities") pod "f06fae8e-6823-49f6-b016-c98e4f3dabd6" (UID: "f06fae8e-6823-49f6-b016-c98e4f3dabd6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.277130 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr" (OuterVolumeSpecName: "kube-api-access-2djqr") pod "f06fae8e-6823-49f6-b016-c98e4f3dabd6" (UID: "f06fae8e-6823-49f6-b016-c98e4f3dabd6"). InnerVolumeSpecName "kube-api-access-2djqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.356131 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f06fae8e-6823-49f6-b016-c98e4f3dabd6" (UID: "f06fae8e-6823-49f6-b016-c98e4f3dabd6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.373033 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2djqr\" (UniqueName: \"kubernetes.io/projected/f06fae8e-6823-49f6-b016-c98e4f3dabd6-kube-api-access-2djqr\") on node \"crc\" DevicePath \"\"" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.373067 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.373079 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f06fae8e-6823-49f6-b016-c98e4f3dabd6-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.576459 4875 generic.go:334] "Generic (PLEG): container finished" podID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerID="656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3" exitCode=0 Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.576540 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerDied","Data":"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3"} Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.577829 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8r57" event={"ID":"f06fae8e-6823-49f6-b016-c98e4f3dabd6","Type":"ContainerDied","Data":"de8e188251791a5173fd0e60c4e68452a23ac632c0db4ec657d499e4170d4b7a"} Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.576573 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m8r57" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.577896 4875 scope.go:117] "RemoveContainer" containerID="656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.596747 4875 scope.go:117] "RemoveContainer" containerID="e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.609775 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.625073 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m8r57"] Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.633341 4875 scope.go:117] "RemoveContainer" containerID="857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.662953 4875 scope.go:117] "RemoveContainer" containerID="656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3" Oct 07 08:37:43 crc kubenswrapper[4875]: E1007 08:37:43.663456 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3\": container with ID starting with 656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3 not found: ID does not exist" containerID="656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.663500 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3"} err="failed to get container status \"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3\": rpc error: code = NotFound desc = could not find container \"656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3\": container with ID starting with 656035ec8d47e0c2d73f2dfe42769baa870c702cbfa55fdfcfa5c949ab3fc6e3 not found: ID does not exist" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.663526 4875 scope.go:117] "RemoveContainer" containerID="e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8" Oct 07 08:37:43 crc kubenswrapper[4875]: E1007 08:37:43.663871 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8\": container with ID starting with e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8 not found: ID does not exist" containerID="e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.663994 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8"} err="failed to get container status \"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8\": rpc error: code = NotFound desc = could not find container \"e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8\": container with ID starting with e44b0c7207af71347283891b420a07b94de776061474346037fc856b01b02fc8 not found: ID does not exist" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.664023 4875 scope.go:117] "RemoveContainer" 
containerID="857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e" Oct 07 08:37:43 crc kubenswrapper[4875]: E1007 08:37:43.664452 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e\": container with ID starting with 857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e not found: ID does not exist" containerID="857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.664482 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e"} err="failed to get container status \"857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e\": rpc error: code = NotFound desc = could not find container \"857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e\": container with ID starting with 857b2c5e1c8f3ef4bf0e4658dd161e3ef73e219cfc4203b3fd0470d63c27ac4e not found: ID does not exist" Oct 07 08:37:43 crc kubenswrapper[4875]: I1007 08:37:43.710579 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" path="/var/lib/kubelet/pods/f06fae8e-6823-49f6-b016-c98e4f3dabd6/volumes" Oct 07 08:37:54 crc kubenswrapper[4875]: I1007 08:37:54.697438 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:37:54 crc kubenswrapper[4875]: E1007 08:37:54.698137 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:38:05 crc kubenswrapper[4875]: I1007 08:38:05.706154 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:38:06 crc kubenswrapper[4875]: I1007 08:38:06.791354 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b"} Oct 07 08:39:49 crc kubenswrapper[4875]: I1007 08:39:49.723326 4875 generic.go:334] "Generic (PLEG): container finished" podID="1e54ad8c-e627-4e27-90d1-ea193eb2f42f" containerID="26067902d513c25cdc0802b8bf651613c35200fce88fc3a366a4527ec03ce462" exitCode=0 Oct 07 08:39:49 crc kubenswrapper[4875]: I1007 08:39:49.723384 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" event={"ID":"1e54ad8c-e627-4e27-90d1-ea193eb2f42f","Type":"ContainerDied","Data":"26067902d513c25cdc0802b8bf651613c35200fce88fc3a366a4527ec03ce462"} Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.151749 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.222309 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.222397 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.222442 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.222516 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2q5z\" (UniqueName: \"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.222552 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.223500 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.223599 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.223659 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.223833 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1\") pod \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\" (UID: \"1e54ad8c-e627-4e27-90d1-ea193eb2f42f\") " Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.229705 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z" (OuterVolumeSpecName: "kube-api-access-n2q5z") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "kube-api-access-n2q5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.230554 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.254446 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.257187 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.261038 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.261599 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory" (OuterVolumeSpecName: "inventory") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.264598 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.265368 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.284982 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1e54ad8c-e627-4e27-90d1-ea193eb2f42f" (UID: "1e54ad8c-e627-4e27-90d1-ea193eb2f42f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326520 4875 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326550 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2q5z\" (UniqueName: \"kubernetes.io/projected/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-kube-api-access-n2q5z\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326562 4875 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326571 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326579 4875 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326587 4875 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326595 4875 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326603 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.326613 4875 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1e54ad8c-e627-4e27-90d1-ea193eb2f42f-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.742208 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" event={"ID":"1e54ad8c-e627-4e27-90d1-ea193eb2f42f","Type":"ContainerDied","Data":"3f0da03b6b841fdc31b5f095de6190069a225e1be1402a807878f04b3c9b7696"} Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.742257 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f0da03b6b841fdc31b5f095de6190069a225e1be1402a807878f04b3c9b7696" Oct 07 08:39:51 crc 
kubenswrapper[4875]: I1007 08:39:51.742303 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmx8n" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884239 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x"] Oct 07 08:39:51 crc kubenswrapper[4875]: E1007 08:39:51.884659 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="extract-content" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884674 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="extract-content" Oct 07 08:39:51 crc kubenswrapper[4875]: E1007 08:39:51.884694 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="registry-server" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884701 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="registry-server" Oct 07 08:39:51 crc kubenswrapper[4875]: E1007 08:39:51.884723 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e54ad8c-e627-4e27-90d1-ea193eb2f42f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884729 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e54ad8c-e627-4e27-90d1-ea193eb2f42f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 08:39:51 crc kubenswrapper[4875]: E1007 08:39:51.884748 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="extract-utilities" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884761 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="extract-utilities" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.884973 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="f06fae8e-6823-49f6-b016-c98e4f3dabd6" containerName="registry-server" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.885008 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e54ad8c-e627-4e27-90d1-ea193eb2f42f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.885741 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.888248 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.888295 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.888974 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.889154 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bl4pq" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.894015 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 08:39:51 crc kubenswrapper[4875]: I1007 08:39:51.894243 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x"] Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.039848 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.039915 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2ccr\" (UniqueName: \"kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.039959 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.039984 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.040005 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 
crc kubenswrapper[4875]: I1007 08:39:52.040051 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.040090 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.141494 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.141781 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2ccr\" (UniqueName: \"kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.141895 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.141979 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.142068 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.142209 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: 
\"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.142765 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.145196 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.145674 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.146035 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.147029 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.147215 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.154392 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.160372 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2ccr\" (UniqueName: \"kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bm42x\" (UID: 
\"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.201804 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.700445 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x"] Oct 07 08:39:52 crc kubenswrapper[4875]: W1007 08:39:52.706511 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb06f38f5_d4e4_4de8_aab3_f171fc82d880.slice/crio-be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71 WatchSource:0}: Error finding container be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71: Status 404 returned error can't find the container with id be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71 Oct 07 08:39:52 crc kubenswrapper[4875]: I1007 08:39:52.751394 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" event={"ID":"b06f38f5-d4e4-4de8-aab3-f171fc82d880","Type":"ContainerStarted","Data":"be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71"} Oct 07 08:39:54 crc kubenswrapper[4875]: I1007 08:39:54.781163 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" event={"ID":"b06f38f5-d4e4-4de8-aab3-f171fc82d880","Type":"ContainerStarted","Data":"4ae76d27fff54c6ea8e167e8631ec7f45b9509413ec082df8b5065e069e04dd1"} Oct 07 08:39:54 crc kubenswrapper[4875]: I1007 08:39:54.813076 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" podStartSLOduration=2.976256673 podStartE2EDuration="3.813054326s" podCreationTimestamp="2025-10-07 08:39:51 +0000 UTC" firstStartedPulling="2025-10-07 08:39:52.709668667 +0000 UTC m=+2617.669439210" lastFinishedPulling="2025-10-07 08:39:53.54646632 +0000 UTC m=+2618.506236863" observedRunningTime="2025-10-07 08:39:54.798549263 +0000 UTC m=+2619.758319806" watchObservedRunningTime="2025-10-07 08:39:54.813054326 +0000 UTC m=+2619.772824869" Oct 07 08:40:31 crc kubenswrapper[4875]: I1007 08:40:31.220650 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:40:31 crc kubenswrapper[4875]: I1007 08:40:31.222019 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:41:01 crc kubenswrapper[4875]: I1007 08:41:01.221432 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:41:01 crc kubenswrapper[4875]: I1007 08:41:01.222073 4875 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.220812 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.221481 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.221537 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.222370 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.222437 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b" gracePeriod=600 Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.706084 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b" exitCode=0 Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.707041 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b"} Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.707088 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4"} Oct 07 08:41:31 crc kubenswrapper[4875]: I1007 08:41:31.707106 4875 scope.go:117] "RemoveContainer" containerID="c61b9f79db1f75afdfdc3343c9fbda35db032a5a8f913522757b64d90f7ef515" Oct 07 08:42:34 crc kubenswrapper[4875]: I1007 08:42:34.255930 4875 generic.go:334] "Generic (PLEG): container finished" podID="b06f38f5-d4e4-4de8-aab3-f171fc82d880" containerID="4ae76d27fff54c6ea8e167e8631ec7f45b9509413ec082df8b5065e069e04dd1" exitCode=0 Oct 07 08:42:34 crc kubenswrapper[4875]: I1007 08:42:34.256006 4875 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" event={"ID":"b06f38f5-d4e4-4de8-aab3-f171fc82d880","Type":"ContainerDied","Data":"4ae76d27fff54c6ea8e167e8631ec7f45b9509413ec082df8b5065e069e04dd1"} Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.730400 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872593 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872662 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872860 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872917 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872942 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2ccr\" (UniqueName: \"kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.872965 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.873012 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.878333 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr" (OuterVolumeSpecName: "kube-api-access-l2ccr") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "kube-api-access-l2ccr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.879986 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.901603 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.902824 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: E1007 08:42:35.906169 4875 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1 podName:b06f38f5-d4e4-4de8-aab3-f171fc82d880 nodeName:}" failed. No retries permitted until 2025-10-07 08:42:36.40614677 +0000 UTC m=+2781.365917313 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ceilometer-compute-config-data-1" (UniqueName: "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880") : error deleting /var/lib/kubelet/pods/b06f38f5-d4e4-4de8-aab3-f171fc82d880/volume-subpaths: remove /var/lib/kubelet/pods/b06f38f5-d4e4-4de8-aab3-f171fc82d880/volume-subpaths: no such file or directory Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.907469 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory" (OuterVolumeSpecName: "inventory") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.908544 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.975672 4875 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.976002 4875 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.976095 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2ccr\" (UniqueName: \"kubernetes.io/projected/b06f38f5-d4e4-4de8-aab3-f171fc82d880-kube-api-access-l2ccr\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.976192 4875 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.976272 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:35 crc kubenswrapper[4875]: I1007 08:42:35.976359 4875 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.281348 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" event={"ID":"b06f38f5-d4e4-4de8-aab3-f171fc82d880","Type":"ContainerDied","Data":"be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71"} Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.281416 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bm42x" Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.281435 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71" Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.484705 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") pod \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\" (UID: \"b06f38f5-d4e4-4de8-aab3-f171fc82d880\") " Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.488696 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "b06f38f5-d4e4-4de8-aab3-f171fc82d880" (UID: "b06f38f5-d4e4-4de8-aab3-f171fc82d880"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:42:36 crc kubenswrapper[4875]: I1007 08:42:36.588987 4875 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b06f38f5-d4e4-4de8-aab3-f171fc82d880-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 07 08:42:36 crc kubenswrapper[4875]: E1007 08:42:36.802178 4875 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb06f38f5_d4e4_4de8_aab3_f171fc82d880.slice/crio-be513bde4a9328c94d46d9978dfcf5f1b41522c33b8061257e1e615a4f8f3d71\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb06f38f5_d4e4_4de8_aab3_f171fc82d880.slice\": RecentStats: unable to find data in memory cache]" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.344550 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:14 crc kubenswrapper[4875]: E1007 08:43:14.345424 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06f38f5-d4e4-4de8-aab3-f171fc82d880" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.345436 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06f38f5-d4e4-4de8-aab3-f171fc82d880" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.345642 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="b06f38f5-d4e4-4de8-aab3-f171fc82d880" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.347147 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.363539 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.426564 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55dbc\" (UniqueName: \"kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.426662 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.426709 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.528294 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55dbc\" (UniqueName: \"kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.528377 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.528422 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.528949 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.528985 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.548350 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-55dbc\" (UniqueName: \"kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc\") pod \"certified-operators-d8d25\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:14 crc kubenswrapper[4875]: I1007 08:43:14.686095 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:15 crc kubenswrapper[4875]: I1007 08:43:15.274653 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:15 crc kubenswrapper[4875]: I1007 08:43:15.642981 4875 generic.go:334] "Generic (PLEG): container finished" podID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerID="cfae90d1a9ca31931646e35a39a7b98868cb157795b7c809da7ac0a9567285e4" exitCode=0 Oct 07 08:43:15 crc kubenswrapper[4875]: I1007 08:43:15.643041 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerDied","Data":"cfae90d1a9ca31931646e35a39a7b98868cb157795b7c809da7ac0a9567285e4"} Oct 07 08:43:15 crc kubenswrapper[4875]: I1007 08:43:15.643064 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerStarted","Data":"47f27776e8247e3afa1bb653640a0b3732913af6bce3eb6835515b4ccfa2ac29"} Oct 07 08:43:15 crc kubenswrapper[4875]: I1007 08:43:15.647055 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:43:16 crc kubenswrapper[4875]: I1007 08:43:16.652559 4875 generic.go:334] "Generic (PLEG): container finished" podID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerID="7d0e8a7d58eb6a03f390e468eb1ad7e52b5d487715e023ae80a522bca1ca18cb" exitCode=0 Oct 07 08:43:16 crc kubenswrapper[4875]: I1007 08:43:16.652640 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerDied","Data":"7d0e8a7d58eb6a03f390e468eb1ad7e52b5d487715e023ae80a522bca1ca18cb"} Oct 07 08:43:17 crc kubenswrapper[4875]: I1007 08:43:17.662652 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerStarted","Data":"a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d"} Oct 07 08:43:17 crc kubenswrapper[4875]: I1007 08:43:17.682081 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d8d25" podStartSLOduration=2.15486428 podStartE2EDuration="3.682060365s" podCreationTimestamp="2025-10-07 08:43:14 +0000 UTC" firstStartedPulling="2025-10-07 08:43:15.646489493 +0000 UTC m=+2820.606260036" lastFinishedPulling="2025-10-07 08:43:17.173685578 +0000 UTC m=+2822.133456121" observedRunningTime="2025-10-07 08:43:17.677302243 +0000 UTC m=+2822.637072796" watchObservedRunningTime="2025-10-07 08:43:17.682060365 +0000 UTC m=+2822.641830908" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.746961 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.749586 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.755259 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.755811 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jth75" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.755904 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.756016 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.760372 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918327 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918378 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918419 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918555 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918583 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918631 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918718 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918763 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:18 crc kubenswrapper[4875]: I1007 08:43:18.918913 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5ph7\" (UniqueName: \"kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022011 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5ph7\" (UniqueName: \"kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022185 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022272 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022323 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022420 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022476 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022591 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config\") pod 
\"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022713 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022816 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.022966 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.023257 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.023379 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.024052 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.024259 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.030572 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.031054 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.031418 
4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.040269 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5ph7\" (UniqueName: \"kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.074596 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.368995 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 08:43:19 crc kubenswrapper[4875]: W1007 08:43:19.837260 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9d21853_761a_4786_baa2_e0e00011a9d5.slice/crio-65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e WatchSource:0}: Error finding container 65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e: Status 404 returned error can't find the container with id 65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e Oct 07 08:43:19 crc kubenswrapper[4875]: I1007 08:43:19.837576 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 08:43:20 crc kubenswrapper[4875]: I1007 08:43:20.702153 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b9d21853-761a-4786-baa2-e0e00011a9d5","Type":"ContainerStarted","Data":"65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e"} Oct 07 08:43:24 crc kubenswrapper[4875]: I1007 08:43:24.687850 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:24 crc kubenswrapper[4875]: I1007 08:43:24.691180 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:24 crc kubenswrapper[4875]: I1007 08:43:24.744844 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:25 crc kubenswrapper[4875]: I1007 08:43:25.805628 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:25 crc kubenswrapper[4875]: I1007 08:43:25.867561 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:27 crc kubenswrapper[4875]: I1007 08:43:27.774137 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d8d25" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" containerID="cri-o://a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" gracePeriod=2 Oct 07 08:43:28 crc 
kubenswrapper[4875]: I1007 08:43:28.784039 4875 generic.go:334] "Generic (PLEG): container finished" podID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" exitCode=0 Oct 07 08:43:28 crc kubenswrapper[4875]: I1007 08:43:28.784116 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerDied","Data":"a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d"} Oct 07 08:43:31 crc kubenswrapper[4875]: I1007 08:43:31.221171 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:43:31 crc kubenswrapper[4875]: I1007 08:43:31.221235 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:43:34 crc kubenswrapper[4875]: E1007 08:43:34.687619 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:34 crc kubenswrapper[4875]: E1007 08:43:34.688610 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:34 crc kubenswrapper[4875]: E1007 08:43:34.688983 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:34 crc kubenswrapper[4875]: E1007 08:43:34.689014 4875 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-d8d25" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.691556 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" 
cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.692930 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.693551 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.693589 4875 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-d8d25" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.741599 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.748413 4875 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.748656 4875 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n5ph7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(b9d21853-761a-4786-baa2-e0e00011a9d5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.749947 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="b9d21853-761a-4786-baa2-e0e00011a9d5" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.758236 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55dbc\" (UniqueName: \"kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc\") pod \"ebe6d65b-b429-45fa-8669-4a82c83ed677\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.758305 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content\") pod \"ebe6d65b-b429-45fa-8669-4a82c83ed677\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.758432 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities\") pod \"ebe6d65b-b429-45fa-8669-4a82c83ed677\" (UID: \"ebe6d65b-b429-45fa-8669-4a82c83ed677\") " Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.761380 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities" (OuterVolumeSpecName: "utilities") pod "ebe6d65b-b429-45fa-8669-4a82c83ed677" (UID: "ebe6d65b-b429-45fa-8669-4a82c83ed677"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.770057 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc" (OuterVolumeSpecName: "kube-api-access-55dbc") pod "ebe6d65b-b429-45fa-8669-4a82c83ed677" (UID: "ebe6d65b-b429-45fa-8669-4a82c83ed677"). InnerVolumeSpecName "kube-api-access-55dbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.799936 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebe6d65b-b429-45fa-8669-4a82c83ed677" (UID: "ebe6d65b-b429-45fa-8669-4a82c83ed677"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.860427 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.860506 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55dbc\" (UniqueName: \"kubernetes.io/projected/ebe6d65b-b429-45fa-8669-4a82c83ed677-kube-api-access-55dbc\") on node \"crc\" DevicePath \"\"" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.860527 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe6d65b-b429-45fa-8669-4a82c83ed677-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.965230 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d8d25" Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.965253 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8d25" event={"ID":"ebe6d65b-b429-45fa-8669-4a82c83ed677","Type":"ContainerDied","Data":"47f27776e8247e3afa1bb653640a0b3732913af6bce3eb6835515b4ccfa2ac29"} Oct 07 08:43:44 crc kubenswrapper[4875]: I1007 08:43:44.965362 4875 scope.go:117] "RemoveContainer" containerID="a180cb9c1274cc0d92023752f6bd2f0aa17148debc18316821205f1e9863741d" Oct 07 08:43:44 crc kubenswrapper[4875]: E1007 08:43:44.968782 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="b9d21853-761a-4786-baa2-e0e00011a9d5" Oct 07 08:43:45 crc kubenswrapper[4875]: I1007 08:43:45.019376 4875 scope.go:117] "RemoveContainer" containerID="7d0e8a7d58eb6a03f390e468eb1ad7e52b5d487715e023ae80a522bca1ca18cb" Oct 07 08:43:45 crc kubenswrapper[4875]: I1007 08:43:45.033289 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:45 crc kubenswrapper[4875]: I1007 08:43:45.042220 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d8d25"] Oct 07 08:43:45 crc kubenswrapper[4875]: I1007 08:43:45.049526 4875 scope.go:117] "RemoveContainer" containerID="cfae90d1a9ca31931646e35a39a7b98868cb157795b7c809da7ac0a9567285e4" Oct 07 08:43:45 crc kubenswrapper[4875]: I1007 08:43:45.711971 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" path="/var/lib/kubelet/pods/ebe6d65b-b429-45fa-8669-4a82c83ed677/volumes" Oct 07 08:44:00 crc kubenswrapper[4875]: I1007 08:44:00.087606 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b9d21853-761a-4786-baa2-e0e00011a9d5","Type":"ContainerStarted","Data":"974a44a268306b4822a248a188aba8b617229741edb6e1a3b62c1fb497304a5d"} Oct 07 08:44:01 crc kubenswrapper[4875]: I1007 08:44:01.221617 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:44:01 crc kubenswrapper[4875]: I1007 08:44:01.221674 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.221629 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.222257 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" 
podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.222304 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.223109 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.223160 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" gracePeriod=600 Oct 07 08:44:31 crc kubenswrapper[4875]: E1007 08:44:31.352081 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.362149 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" exitCode=0 Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.362201 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4"} Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.362249 4875 scope.go:117] "RemoveContainer" containerID="ab9712b6dc10fb7a1edd0937544493d3a1cf8c35c688c4d5cfe0f4459039f33b" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.363489 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:44:31 crc kubenswrapper[4875]: E1007 08:44:31.363865 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:44:31 crc kubenswrapper[4875]: I1007 08:44:31.385622 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=35.87661294 podStartE2EDuration="1m14.385584054s" podCreationTimestamp="2025-10-07 08:43:17 +0000 UTC" firstStartedPulling="2025-10-07 08:43:19.839954406 +0000 UTC m=+2824.799724949" 
lastFinishedPulling="2025-10-07 08:43:58.34892552 +0000 UTC m=+2863.308696063" observedRunningTime="2025-10-07 08:44:00.123725764 +0000 UTC m=+2865.083496307" watchObservedRunningTime="2025-10-07 08:44:31.385584054 +0000 UTC m=+2896.345354607" Oct 07 08:44:46 crc kubenswrapper[4875]: I1007 08:44:46.698595 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:44:46 crc kubenswrapper[4875]: E1007 08:44:46.699346 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.178571 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46"] Oct 07 08:45:00 crc kubenswrapper[4875]: E1007 08:45:00.179522 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="extract-utilities" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.179539 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="extract-utilities" Oct 07 08:45:00 crc kubenswrapper[4875]: E1007 08:45:00.179580 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="extract-content" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.179588 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="extract-content" Oct 07 08:45:00 crc kubenswrapper[4875]: E1007 08:45:00.179604 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.179611 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.179809 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe6d65b-b429-45fa-8669-4a82c83ed677" containerName="registry-server" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.180488 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.182650 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.182659 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.188709 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46"] Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.346855 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmmw4\" (UniqueName: \"kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.347251 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.347456 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.449679 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmmw4\" (UniqueName: \"kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.450339 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.450486 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.451307 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume\") pod 
\"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.456524 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.465692 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmmw4\" (UniqueName: \"kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4\") pod \"collect-profiles-29330445-xmb46\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.513039 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:00 crc kubenswrapper[4875]: I1007 08:45:00.951126 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46"] Oct 07 08:45:00 crc kubenswrapper[4875]: W1007 08:45:00.955413 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1116c79f_2cba_46ea_a3d2_875d952bb032.slice/crio-25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653 WatchSource:0}: Error finding container 25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653: Status 404 returned error can't find the container with id 25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653 Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.405472 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.407713 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.423018 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.470117 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.470265 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.470313 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2knvr\" (UniqueName: \"kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.572236 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.572354 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.572387 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2knvr\" (UniqueName: \"kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.572847 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.572954 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.604008 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2knvr\" (UniqueName: \"kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr\") pod \"community-operators-gprv9\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.617773 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" event={"ID":"1116c79f-2cba-46ea-a3d2-875d952bb032","Type":"ContainerStarted","Data":"4b1712c548b6ecb6083e3a918c47a90b84e504f881b996bbfb748886a2cc285f"} Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.617821 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" event={"ID":"1116c79f-2cba-46ea-a3d2-875d952bb032","Type":"ContainerStarted","Data":"25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653"} Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.631404 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" podStartSLOduration=1.631387969 podStartE2EDuration="1.631387969s" podCreationTimestamp="2025-10-07 08:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:45:01.63015361 +0000 UTC m=+2926.589924153" watchObservedRunningTime="2025-10-07 08:45:01.631387969 +0000 UTC m=+2926.591158512" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.697924 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:45:01 crc kubenswrapper[4875]: E1007 08:45:01.698423 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:45:01 crc kubenswrapper[4875]: I1007 08:45:01.728807 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.287749 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:02 crc kubenswrapper[4875]: W1007 08:45:02.289673 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84fb28_3d42_4968_a731_5aeae00fbb14.slice/crio-97658d8a559821903017e84d3596757c3f4dd581d85aec8039ab13e6b665d369 WatchSource:0}: Error finding container 97658d8a559821903017e84d3596757c3f4dd581d85aec8039ab13e6b665d369: Status 404 returned error can't find the container with id 97658d8a559821903017e84d3596757c3f4dd581d85aec8039ab13e6b665d369 Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.628254 4875 generic.go:334] "Generic (PLEG): container finished" podID="1116c79f-2cba-46ea-a3d2-875d952bb032" containerID="4b1712c548b6ecb6083e3a918c47a90b84e504f881b996bbfb748886a2cc285f" exitCode=0 Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.628329 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" event={"ID":"1116c79f-2cba-46ea-a3d2-875d952bb032","Type":"ContainerDied","Data":"4b1712c548b6ecb6083e3a918c47a90b84e504f881b996bbfb748886a2cc285f"} Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.631709 4875 generic.go:334] "Generic (PLEG): container finished" podID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerID="b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d" exitCode=0 Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.631740 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerDied","Data":"b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d"} Oct 07 08:45:02 crc kubenswrapper[4875]: I1007 08:45:02.631791 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerStarted","Data":"97658d8a559821903017e84d3596757c3f4dd581d85aec8039ab13e6b665d369"} Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.063377 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.225812 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmmw4\" (UniqueName: \"kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4\") pod \"1116c79f-2cba-46ea-a3d2-875d952bb032\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.226013 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume\") pod \"1116c79f-2cba-46ea-a3d2-875d952bb032\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.226089 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume\") pod \"1116c79f-2cba-46ea-a3d2-875d952bb032\" (UID: \"1116c79f-2cba-46ea-a3d2-875d952bb032\") " Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.226649 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume" (OuterVolumeSpecName: "config-volume") pod "1116c79f-2cba-46ea-a3d2-875d952bb032" (UID: "1116c79f-2cba-46ea-a3d2-875d952bb032"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.226850 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1116c79f-2cba-46ea-a3d2-875d952bb032-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.231832 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1116c79f-2cba-46ea-a3d2-875d952bb032" (UID: "1116c79f-2cba-46ea-a3d2-875d952bb032"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.232425 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4" (OuterVolumeSpecName: "kube-api-access-zmmw4") pod "1116c79f-2cba-46ea-a3d2-875d952bb032" (UID: "1116c79f-2cba-46ea-a3d2-875d952bb032"). InnerVolumeSpecName "kube-api-access-zmmw4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.328314 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1116c79f-2cba-46ea-a3d2-875d952bb032-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.328383 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmmw4\" (UniqueName: \"kubernetes.io/projected/1116c79f-2cba-46ea-a3d2-875d952bb032-kube-api-access-zmmw4\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.649452 4875 generic.go:334] "Generic (PLEG): container finished" podID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerID="fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5" exitCode=0 Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.649552 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerDied","Data":"fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5"} Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.652832 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" event={"ID":"1116c79f-2cba-46ea-a3d2-875d952bb032","Type":"ContainerDied","Data":"25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653"} Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.653010 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25288a004e343fbb869b40f34e0859581bd6e6d55fd53d7e41f2de8107a70653" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.652920 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330445-xmb46" Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.717959 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn"] Oct 07 08:45:04 crc kubenswrapper[4875]: I1007 08:45:04.724949 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330400-j2pbn"] Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.216719 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:05 crc kubenswrapper[4875]: E1007 08:45:05.217250 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1116c79f-2cba-46ea-a3d2-875d952bb032" containerName="collect-profiles" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.217265 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1116c79f-2cba-46ea-a3d2-875d952bb032" containerName="collect-profiles" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.217470 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="1116c79f-2cba-46ea-a3d2-875d952bb032" containerName="collect-profiles" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.218889 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.246429 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.349180 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8qnk\" (UniqueName: \"kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.349611 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.349900 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.452338 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.452484 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8qnk\" (UniqueName: \"kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.452518 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.453072 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.453361 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.472699 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-r8qnk\" (UniqueName: \"kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk\") pod \"redhat-marketplace-q8cgt\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.546038 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.665368 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerStarted","Data":"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843"} Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.699526 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gprv9" podStartSLOduration=2.155428036 podStartE2EDuration="4.699509787s" podCreationTimestamp="2025-10-07 08:45:01 +0000 UTC" firstStartedPulling="2025-10-07 08:45:02.633996855 +0000 UTC m=+2927.593767398" lastFinishedPulling="2025-10-07 08:45:05.178078606 +0000 UTC m=+2930.137849149" observedRunningTime="2025-10-07 08:45:05.695621473 +0000 UTC m=+2930.655392026" watchObservedRunningTime="2025-10-07 08:45:05.699509787 +0000 UTC m=+2930.659280330" Oct 07 08:45:05 crc kubenswrapper[4875]: I1007 08:45:05.735247 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40d3c35c-8c62-46e5-8bb7-e013ac3a2a95" path="/var/lib/kubelet/pods/40d3c35c-8c62-46e5-8bb7-e013ac3a2a95/volumes" Oct 07 08:45:06 crc kubenswrapper[4875]: I1007 08:45:06.082197 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:06 crc kubenswrapper[4875]: W1007 08:45:06.086811 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d72d9f0_7b3f_4f3e_8bd2_ad5a6956d5b1.slice/crio-bcf6a46eecf0ad9207152a23a6d88db78164e64541972f807b28a0827b1267d5 WatchSource:0}: Error finding container bcf6a46eecf0ad9207152a23a6d88db78164e64541972f807b28a0827b1267d5: Status 404 returned error can't find the container with id bcf6a46eecf0ad9207152a23a6d88db78164e64541972f807b28a0827b1267d5 Oct 07 08:45:06 crc kubenswrapper[4875]: I1007 08:45:06.674523 4875 generic.go:334] "Generic (PLEG): container finished" podID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerID="d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461" exitCode=0 Oct 07 08:45:06 crc kubenswrapper[4875]: I1007 08:45:06.674612 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerDied","Data":"d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461"} Oct 07 08:45:06 crc kubenswrapper[4875]: I1007 08:45:06.674859 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerStarted","Data":"bcf6a46eecf0ad9207152a23a6d88db78164e64541972f807b28a0827b1267d5"} Oct 07 08:45:08 crc kubenswrapper[4875]: I1007 08:45:08.700768 4875 generic.go:334] "Generic (PLEG): container finished" podID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerID="d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425" exitCode=0 Oct 
07 08:45:08 crc kubenswrapper[4875]: I1007 08:45:08.701187 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerDied","Data":"d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425"} Oct 07 08:45:09 crc kubenswrapper[4875]: I1007 08:45:09.770175 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerStarted","Data":"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e"} Oct 07 08:45:09 crc kubenswrapper[4875]: I1007 08:45:09.798284 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q8cgt" podStartSLOduration=2.306555217 podStartE2EDuration="4.798263584s" podCreationTimestamp="2025-10-07 08:45:05 +0000 UTC" firstStartedPulling="2025-10-07 08:45:06.676739671 +0000 UTC m=+2931.636510214" lastFinishedPulling="2025-10-07 08:45:09.168448038 +0000 UTC m=+2934.128218581" observedRunningTime="2025-10-07 08:45:09.793487942 +0000 UTC m=+2934.753258495" watchObservedRunningTime="2025-10-07 08:45:09.798263584 +0000 UTC m=+2934.758034137" Oct 07 08:45:11 crc kubenswrapper[4875]: I1007 08:45:11.729177 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:11 crc kubenswrapper[4875]: I1007 08:45:11.729710 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:11 crc kubenswrapper[4875]: I1007 08:45:11.776170 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:11 crc kubenswrapper[4875]: I1007 08:45:11.836850 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:13 crc kubenswrapper[4875]: I1007 08:45:13.198040 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:13 crc kubenswrapper[4875]: I1007 08:45:13.802771 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gprv9" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="registry-server" containerID="cri-o://5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843" gracePeriod=2 Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.282854 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.435801 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2knvr\" (UniqueName: \"kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr\") pod \"1d84fb28-3d42-4968-a731-5aeae00fbb14\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.436212 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content\") pod \"1d84fb28-3d42-4968-a731-5aeae00fbb14\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.436467 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities\") pod \"1d84fb28-3d42-4968-a731-5aeae00fbb14\" (UID: \"1d84fb28-3d42-4968-a731-5aeae00fbb14\") " Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.437324 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities" (OuterVolumeSpecName: "utilities") pod "1d84fb28-3d42-4968-a731-5aeae00fbb14" (UID: "1d84fb28-3d42-4968-a731-5aeae00fbb14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.450188 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr" (OuterVolumeSpecName: "kube-api-access-2knvr") pod "1d84fb28-3d42-4968-a731-5aeae00fbb14" (UID: "1d84fb28-3d42-4968-a731-5aeae00fbb14"). InnerVolumeSpecName "kube-api-access-2knvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.489237 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d84fb28-3d42-4968-a731-5aeae00fbb14" (UID: "1d84fb28-3d42-4968-a731-5aeae00fbb14"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.538801 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.538833 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2knvr\" (UniqueName: \"kubernetes.io/projected/1d84fb28-3d42-4968-a731-5aeae00fbb14-kube-api-access-2knvr\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.538845 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d84fb28-3d42-4968-a731-5aeae00fbb14-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.697829 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:45:14 crc kubenswrapper[4875]: E1007 08:45:14.698161 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.818366 4875 generic.go:334] "Generic (PLEG): container finished" podID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerID="5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843" exitCode=0 Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.818586 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gprv9" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.818739 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerDied","Data":"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843"} Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.818787 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gprv9" event={"ID":"1d84fb28-3d42-4968-a731-5aeae00fbb14","Type":"ContainerDied","Data":"97658d8a559821903017e84d3596757c3f4dd581d85aec8039ab13e6b665d369"} Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.818831 4875 scope.go:117] "RemoveContainer" containerID="5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.849608 4875 scope.go:117] "RemoveContainer" containerID="fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.851365 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.860964 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gprv9"] Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.875268 4875 scope.go:117] "RemoveContainer" containerID="b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.910944 4875 scope.go:117] "RemoveContainer" containerID="5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843" Oct 07 08:45:14 crc kubenswrapper[4875]: E1007 08:45:14.911411 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843\": container with ID starting with 5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843 not found: ID does not exist" containerID="5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.911460 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843"} err="failed to get container status \"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843\": rpc error: code = NotFound desc = could not find container \"5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843\": container with ID starting with 5ff1e95ec5f15291226df855e284d960600a3ae2213454938ebb39ba4f5b7843 not found: ID does not exist" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.911489 4875 scope.go:117] "RemoveContainer" containerID="fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5" Oct 07 08:45:14 crc kubenswrapper[4875]: E1007 08:45:14.911974 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5\": container with ID starting with fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5 not found: ID does not exist" containerID="fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.912022 4875 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5"} err="failed to get container status \"fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5\": rpc error: code = NotFound desc = could not find container \"fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5\": container with ID starting with fffcefcd6716f297c221706b257893278522f107d8a1379ea1dca7daaeb1c3b5 not found: ID does not exist" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.912055 4875 scope.go:117] "RemoveContainer" containerID="b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d" Oct 07 08:45:14 crc kubenswrapper[4875]: E1007 08:45:14.912415 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d\": container with ID starting with b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d not found: ID does not exist" containerID="b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d" Oct 07 08:45:14 crc kubenswrapper[4875]: I1007 08:45:14.912444 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d"} err="failed to get container status \"b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d\": rpc error: code = NotFound desc = could not find container \"b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d\": container with ID starting with b58e2bfbb5e2b92a3d90fc578787cca7f3f27fa418b542c13a009d7f6610159d not found: ID does not exist" Oct 07 08:45:15 crc kubenswrapper[4875]: I1007 08:45:15.546268 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:15 crc kubenswrapper[4875]: I1007 08:45:15.547124 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:15 crc kubenswrapper[4875]: I1007 08:45:15.593501 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:15 crc kubenswrapper[4875]: I1007 08:45:15.711242 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" path="/var/lib/kubelet/pods/1d84fb28-3d42-4968-a731-5aeae00fbb14/volumes" Oct 07 08:45:15 crc kubenswrapper[4875]: I1007 08:45:15.879551 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:17 crc kubenswrapper[4875]: I1007 08:45:17.403208 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:17 crc kubenswrapper[4875]: I1007 08:45:17.849629 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q8cgt" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="registry-server" containerID="cri-o://7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e" gracePeriod=2 Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.370638 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.535776 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8qnk\" (UniqueName: \"kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk\") pod \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.535838 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content\") pod \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.536024 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities\") pod \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\" (UID: \"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1\") " Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.537156 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities" (OuterVolumeSpecName: "utilities") pod "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" (UID: "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.546300 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk" (OuterVolumeSpecName: "kube-api-access-r8qnk") pod "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" (UID: "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1"). InnerVolumeSpecName "kube-api-access-r8qnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.559058 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" (UID: "3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.639040 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.639753 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.639936 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8qnk\" (UniqueName: \"kubernetes.io/projected/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1-kube-api-access-r8qnk\") on node \"crc\" DevicePath \"\"" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.862282 4875 generic.go:334] "Generic (PLEG): container finished" podID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerID="7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e" exitCode=0 Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.862381 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerDied","Data":"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e"} Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.862417 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8cgt" event={"ID":"3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1","Type":"ContainerDied","Data":"bcf6a46eecf0ad9207152a23a6d88db78164e64541972f807b28a0827b1267d5"} Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.862438 4875 scope.go:117] "RemoveContainer" containerID="7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.862437 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8cgt" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.884235 4875 scope.go:117] "RemoveContainer" containerID="d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.902835 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.913421 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8cgt"] Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.925276 4875 scope.go:117] "RemoveContainer" containerID="d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.971317 4875 scope.go:117] "RemoveContainer" containerID="7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e" Oct 07 08:45:18 crc kubenswrapper[4875]: E1007 08:45:18.971782 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e\": container with ID starting with 7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e not found: ID does not exist" containerID="7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.971816 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e"} err="failed to get container status \"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e\": rpc error: code = NotFound desc = could not find container \"7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e\": container with ID starting with 7e2d3828904d92dfd15710c67d515385f8ee3f1bb49b75aed9b94693c896e04e not found: ID does not exist" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.971837 4875 scope.go:117] "RemoveContainer" containerID="d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425" Oct 07 08:45:18 crc kubenswrapper[4875]: E1007 08:45:18.972364 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425\": container with ID starting with d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425 not found: ID does not exist" containerID="d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.972426 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425"} err="failed to get container status \"d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425\": rpc error: code = NotFound desc = could not find container \"d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425\": container with ID starting with d2f8bd11c3495cbefcff21be5835bffca4074a1148d52ab7d2e141dda3be0425 not found: ID does not exist" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.972461 4875 scope.go:117] "RemoveContainer" containerID="d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461" Oct 07 08:45:18 crc kubenswrapper[4875]: E1007 08:45:18.972801 4875 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461\": container with ID starting with d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461 not found: ID does not exist" containerID="d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461" Oct 07 08:45:18 crc kubenswrapper[4875]: I1007 08:45:18.972836 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461"} err="failed to get container status \"d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461\": rpc error: code = NotFound desc = could not find container \"d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461\": container with ID starting with d1d5895335c8ca0ec1cec96f5884266e89853430df8efd07cf42ac564954e461 not found: ID does not exist" Oct 07 08:45:19 crc kubenswrapper[4875]: I1007 08:45:19.709341 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" path="/var/lib/kubelet/pods/3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1/volumes" Oct 07 08:45:20 crc kubenswrapper[4875]: I1007 08:45:20.861858 4875 scope.go:117] "RemoveContainer" containerID="0f0705d63b621eb3929ff72e7946c29b9c2a70d4d688999fd1752a0b08d86ab9" Oct 07 08:45:28 crc kubenswrapper[4875]: I1007 08:45:28.698452 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:45:28 crc kubenswrapper[4875]: E1007 08:45:28.699603 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:45:43 crc kubenswrapper[4875]: I1007 08:45:43.698694 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:45:43 crc kubenswrapper[4875]: E1007 08:45:43.699430 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:45:55 crc kubenswrapper[4875]: I1007 08:45:55.702735 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:45:55 crc kubenswrapper[4875]: E1007 08:45:55.703512 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:46:07 crc kubenswrapper[4875]: I1007 08:46:07.697593 4875 scope.go:117] "RemoveContainer" 
containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:46:07 crc kubenswrapper[4875]: E1007 08:46:07.699023 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:46:22 crc kubenswrapper[4875]: I1007 08:46:22.698145 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:46:22 crc kubenswrapper[4875]: E1007 08:46:22.698978 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:46:36 crc kubenswrapper[4875]: I1007 08:46:36.697744 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:46:36 crc kubenswrapper[4875]: E1007 08:46:36.698600 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:46:48 crc kubenswrapper[4875]: I1007 08:46:48.697800 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:46:48 crc kubenswrapper[4875]: E1007 08:46:48.698819 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:47:03 crc kubenswrapper[4875]: I1007 08:47:03.697856 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:47:03 crc kubenswrapper[4875]: E1007 08:47:03.698740 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:47:17 crc kubenswrapper[4875]: I1007 08:47:17.698297 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:47:17 crc kubenswrapper[4875]: E1007 08:47:17.699613 4875 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.710415 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711405 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="extract-utilities" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711422 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="extract-utilities" Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711437 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="extract-content" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711446 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="extract-content" Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711464 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711471 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711488 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711495 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711505 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="extract-utilities" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711512 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="extract-utilities" Oct 07 08:47:31 crc kubenswrapper[4875]: E1007 08:47:31.711524 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="extract-content" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711529 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="extract-content" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711750 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d84fb28-3d42-4968-a731-5aeae00fbb14" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.711781 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d72d9f0-7b3f-4f3e-8bd2-ad5a6956d5b1" containerName="registry-server" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.722563 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.757261 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.915387 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.915451 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnggf\" (UniqueName: \"kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:31 crc kubenswrapper[4875]: I1007 08:47:31.915539 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.017590 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.017659 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnggf\" (UniqueName: \"kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.017744 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.018310 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.018313 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.039953 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tnggf\" (UniqueName: \"kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf\") pod \"redhat-operators-97sqk\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.061343 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.589199 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:32 crc kubenswrapper[4875]: I1007 08:47:32.698983 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:47:32 crc kubenswrapper[4875]: E1007 08:47:32.699482 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:47:33 crc kubenswrapper[4875]: I1007 08:47:33.027016 4875 generic.go:334] "Generic (PLEG): container finished" podID="60861eac-b95b-4860-b797-4ed50c5768d5" containerID="076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d" exitCode=0 Oct 07 08:47:33 crc kubenswrapper[4875]: I1007 08:47:33.027059 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerDied","Data":"076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d"} Oct 07 08:47:33 crc kubenswrapper[4875]: I1007 08:47:33.027088 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerStarted","Data":"5a12f620921d07b9bab87bb8f89ae4d7000e1ec471c21b49552d6f86373d617a"} Oct 07 08:47:34 crc kubenswrapper[4875]: I1007 08:47:34.037691 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerStarted","Data":"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9"} Oct 07 08:47:35 crc kubenswrapper[4875]: I1007 08:47:35.048389 4875 generic.go:334] "Generic (PLEG): container finished" podID="60861eac-b95b-4860-b797-4ed50c5768d5" containerID="42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9" exitCode=0 Oct 07 08:47:35 crc kubenswrapper[4875]: I1007 08:47:35.049000 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerDied","Data":"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9"} Oct 07 08:47:37 crc kubenswrapper[4875]: I1007 08:47:37.066518 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerStarted","Data":"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e"} Oct 07 08:47:37 crc kubenswrapper[4875]: I1007 08:47:37.094008 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-97sqk" podStartSLOduration=2.6871154390000003 podStartE2EDuration="6.093987065s" podCreationTimestamp="2025-10-07 08:47:31 +0000 UTC" firstStartedPulling="2025-10-07 08:47:33.028840892 +0000 UTC m=+3077.988611435" lastFinishedPulling="2025-10-07 08:47:36.435712518 +0000 UTC m=+3081.395483061" observedRunningTime="2025-10-07 08:47:37.082498587 +0000 UTC m=+3082.042269130" watchObservedRunningTime="2025-10-07 08:47:37.093987065 +0000 UTC m=+3082.053757618" Oct 07 08:47:42 crc kubenswrapper[4875]: I1007 08:47:42.062106 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:42 crc kubenswrapper[4875]: I1007 08:47:42.062951 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:42 crc kubenswrapper[4875]: I1007 08:47:42.117964 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:43 crc kubenswrapper[4875]: I1007 08:47:43.167600 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:43 crc kubenswrapper[4875]: I1007 08:47:43.219438 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.135775 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-97sqk" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="registry-server" containerID="cri-o://f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e" gracePeriod=2 Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.609925 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.703257 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:47:45 crc kubenswrapper[4875]: E1007 08:47:45.703807 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.780625 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content\") pod \"60861eac-b95b-4860-b797-4ed50c5768d5\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.780713 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnggf\" (UniqueName: \"kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf\") pod \"60861eac-b95b-4860-b797-4ed50c5768d5\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.780836 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities\") pod \"60861eac-b95b-4860-b797-4ed50c5768d5\" (UID: \"60861eac-b95b-4860-b797-4ed50c5768d5\") " Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.781792 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities" (OuterVolumeSpecName: "utilities") pod "60861eac-b95b-4860-b797-4ed50c5768d5" (UID: "60861eac-b95b-4860-b797-4ed50c5768d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.786205 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf" (OuterVolumeSpecName: "kube-api-access-tnggf") pod "60861eac-b95b-4860-b797-4ed50c5768d5" (UID: "60861eac-b95b-4860-b797-4ed50c5768d5"). InnerVolumeSpecName "kube-api-access-tnggf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.855491 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60861eac-b95b-4860-b797-4ed50c5768d5" (UID: "60861eac-b95b-4860-b797-4ed50c5768d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.883466 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.883494 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60861eac-b95b-4860-b797-4ed50c5768d5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:47:45 crc kubenswrapper[4875]: I1007 08:47:45.883504 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnggf\" (UniqueName: \"kubernetes.io/projected/60861eac-b95b-4860-b797-4ed50c5768d5-kube-api-access-tnggf\") on node \"crc\" DevicePath \"\"" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.145827 4875 generic.go:334] "Generic (PLEG): container finished" podID="60861eac-b95b-4860-b797-4ed50c5768d5" containerID="f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e" exitCode=0 Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.145890 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97sqk" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.145890 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerDied","Data":"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e"} Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.145947 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97sqk" event={"ID":"60861eac-b95b-4860-b797-4ed50c5768d5","Type":"ContainerDied","Data":"5a12f620921d07b9bab87bb8f89ae4d7000e1ec471c21b49552d6f86373d617a"} Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.145968 4875 scope.go:117] "RemoveContainer" containerID="f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.176485 4875 scope.go:117] "RemoveContainer" containerID="42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.180964 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.192148 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-97sqk"] Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.211585 4875 scope.go:117] "RemoveContainer" containerID="076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.245529 4875 scope.go:117] "RemoveContainer" containerID="f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e" Oct 07 08:47:46 crc kubenswrapper[4875]: E1007 08:47:46.246081 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e\": container with ID starting with f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e not found: ID does not exist" containerID="f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.246128 4875 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e"} err="failed to get container status \"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e\": rpc error: code = NotFound desc = could not find container \"f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e\": container with ID starting with f4b0cabe9a4b954117b98b8f347974a014373cd2040f70ea821061b533801a3e not found: ID does not exist" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.246158 4875 scope.go:117] "RemoveContainer" containerID="42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9" Oct 07 08:47:46 crc kubenswrapper[4875]: E1007 08:47:46.246609 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9\": container with ID starting with 42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9 not found: ID does not exist" containerID="42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.246661 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9"} err="failed to get container status \"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9\": rpc error: code = NotFound desc = could not find container \"42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9\": container with ID starting with 42fc64ccf6a0a739d90072b4ecd03066aea47e20eae7ef0b5d576b98fc1b12e9 not found: ID does not exist" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.246694 4875 scope.go:117] "RemoveContainer" containerID="076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d" Oct 07 08:47:46 crc kubenswrapper[4875]: E1007 08:47:46.247051 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d\": container with ID starting with 076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d not found: ID does not exist" containerID="076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d" Oct 07 08:47:46 crc kubenswrapper[4875]: I1007 08:47:46.247118 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d"} err="failed to get container status \"076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d\": rpc error: code = NotFound desc = could not find container \"076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d\": container with ID starting with 076551c033086af75bfa61274e3e1fc055d69c0649f6612491e4ed80b91abd7d not found: ID does not exist" Oct 07 08:47:47 crc kubenswrapper[4875]: I1007 08:47:47.707219 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" path="/var/lib/kubelet/pods/60861eac-b95b-4860-b797-4ed50c5768d5/volumes" Oct 07 08:47:58 crc kubenswrapper[4875]: I1007 08:47:58.697376 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:47:58 crc kubenswrapper[4875]: E1007 08:47:58.698429 4875 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:48:12 crc kubenswrapper[4875]: I1007 08:48:12.697687 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:48:12 crc kubenswrapper[4875]: E1007 08:48:12.698469 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:48:27 crc kubenswrapper[4875]: I1007 08:48:27.698589 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:48:27 crc kubenswrapper[4875]: E1007 08:48:27.699489 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:48:41 crc kubenswrapper[4875]: I1007 08:48:41.698465 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:48:41 crc kubenswrapper[4875]: E1007 08:48:41.699415 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:48:56 crc kubenswrapper[4875]: I1007 08:48:56.698287 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:48:56 crc kubenswrapper[4875]: E1007 08:48:56.699502 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:49:07 crc kubenswrapper[4875]: I1007 08:49:07.697486 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:49:07 crc kubenswrapper[4875]: E1007 08:49:07.698534 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:49:20 crc kubenswrapper[4875]: I1007 08:49:20.698393 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:49:20 crc kubenswrapper[4875]: E1007 08:49:20.699415 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:49:32 crc kubenswrapper[4875]: I1007 08:49:32.698587 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:49:33 crc kubenswrapper[4875]: I1007 08:49:33.100457 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df"} Oct 07 08:52:01 crc kubenswrapper[4875]: I1007 08:52:01.221234 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:52:01 crc kubenswrapper[4875]: I1007 08:52:01.222209 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:52:31 crc kubenswrapper[4875]: I1007 08:52:31.221204 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:52:31 crc kubenswrapper[4875]: I1007 08:52:31.223088 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:53:01 crc kubenswrapper[4875]: I1007 08:53:01.220597 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:53:01 crc kubenswrapper[4875]: I1007 08:53:01.221270 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:53:01 crc kubenswrapper[4875]: I1007 08:53:01.221321 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:53:01 crc kubenswrapper[4875]: I1007 08:53:01.222098 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:53:01 crc kubenswrapper[4875]: I1007 08:53:01.222154 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df" gracePeriod=600 Oct 07 08:53:02 crc kubenswrapper[4875]: I1007 08:53:02.077489 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df" exitCode=0 Oct 07 08:53:02 crc kubenswrapper[4875]: I1007 08:53:02.077567 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df"} Oct 07 08:53:02 crc kubenswrapper[4875]: I1007 08:53:02.078111 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3"} Oct 07 08:53:02 crc kubenswrapper[4875]: I1007 08:53:02.078140 4875 scope.go:117] "RemoveContainer" containerID="fd0786c86e596eee0132032615ed75314bf901a7421dc6afedda1f75eccd9ec4" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.585379 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:53:59 crc kubenswrapper[4875]: E1007 08:53:59.586307 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="registry-server" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.586323 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="registry-server" Oct 07 08:53:59 crc kubenswrapper[4875]: E1007 08:53:59.586365 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="extract-utilities" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.586374 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="extract-utilities" Oct 07 08:53:59 crc kubenswrapper[4875]: E1007 08:53:59.586392 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="extract-content" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.586402 4875 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="extract-content" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.586600 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="60861eac-b95b-4860-b797-4ed50c5768d5" containerName="registry-server" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.590318 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.599118 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.687481 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvgxm\" (UniqueName: \"kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.687606 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.687700 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.789508 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvgxm\" (UniqueName: \"kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.789616 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.789683 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.790219 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.790261 4875 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.812408 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvgxm\" (UniqueName: \"kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm\") pod \"certified-operators-lp5sz\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:53:59 crc kubenswrapper[4875]: I1007 08:53:59.913400 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:00 crc kubenswrapper[4875]: I1007 08:54:00.446752 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:54:00 crc kubenswrapper[4875]: I1007 08:54:00.610328 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerStarted","Data":"ff6117ce822163905978fad6a235caf2f29fc72721ece864b37ba4bdb6b69f6f"} Oct 07 08:54:01 crc kubenswrapper[4875]: I1007 08:54:01.619189 4875 generic.go:334] "Generic (PLEG): container finished" podID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerID="c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573" exitCode=0 Oct 07 08:54:01 crc kubenswrapper[4875]: I1007 08:54:01.619241 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerDied","Data":"c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573"} Oct 07 08:54:01 crc kubenswrapper[4875]: I1007 08:54:01.622525 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 08:54:03 crc kubenswrapper[4875]: I1007 08:54:03.637423 4875 generic.go:334] "Generic (PLEG): container finished" podID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerID="4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11" exitCode=0 Oct 07 08:54:03 crc kubenswrapper[4875]: I1007 08:54:03.637510 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerDied","Data":"4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11"} Oct 07 08:54:04 crc kubenswrapper[4875]: I1007 08:54:04.649787 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerStarted","Data":"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c"} Oct 07 08:54:04 crc kubenswrapper[4875]: I1007 08:54:04.666397 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lp5sz" podStartSLOduration=3.120825136 podStartE2EDuration="5.666363183s" podCreationTimestamp="2025-10-07 08:53:59 +0000 UTC" firstStartedPulling="2025-10-07 08:54:01.622236843 +0000 UTC m=+3466.582007386" lastFinishedPulling="2025-10-07 08:54:04.16777489 +0000 UTC m=+3469.127545433" observedRunningTime="2025-10-07 
08:54:04.665389161 +0000 UTC m=+3469.625159714" watchObservedRunningTime="2025-10-07 08:54:04.666363183 +0000 UTC m=+3469.626133726" Oct 07 08:54:09 crc kubenswrapper[4875]: I1007 08:54:09.913607 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:09 crc kubenswrapper[4875]: I1007 08:54:09.914476 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:09 crc kubenswrapper[4875]: I1007 08:54:09.972201 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:10 crc kubenswrapper[4875]: I1007 08:54:10.743766 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:10 crc kubenswrapper[4875]: I1007 08:54:10.790458 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:54:12 crc kubenswrapper[4875]: I1007 08:54:12.716720 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lp5sz" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="registry-server" containerID="cri-o://a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c" gracePeriod=2 Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.308357 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.461809 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content\") pod \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.462004 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities\") pod \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.462203 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvgxm\" (UniqueName: \"kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm\") pod \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\" (UID: \"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e\") " Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.462844 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities" (OuterVolumeSpecName: "utilities") pod "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" (UID: "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.467972 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm" (OuterVolumeSpecName: "kube-api-access-wvgxm") pod "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" (UID: "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e"). InnerVolumeSpecName "kube-api-access-wvgxm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.504868 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" (UID: "89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.563956 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.563998 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.564009 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvgxm\" (UniqueName: \"kubernetes.io/projected/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e-kube-api-access-wvgxm\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.728430 4875 generic.go:334] "Generic (PLEG): container finished" podID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerID="a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c" exitCode=0 Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.728477 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerDied","Data":"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c"} Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.728514 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp5sz" event={"ID":"89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e","Type":"ContainerDied","Data":"ff6117ce822163905978fad6a235caf2f29fc72721ece864b37ba4bdb6b69f6f"} Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.728531 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lp5sz" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.728569 4875 scope.go:117] "RemoveContainer" containerID="a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.763731 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.777631 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lp5sz"] Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.790185 4875 scope.go:117] "RemoveContainer" containerID="4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.819982 4875 scope.go:117] "RemoveContainer" containerID="c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.863634 4875 scope.go:117] "RemoveContainer" containerID="a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c" Oct 07 08:54:13 crc kubenswrapper[4875]: E1007 08:54:13.864306 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c\": container with ID starting with a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c not found: ID does not exist" containerID="a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.864368 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c"} err="failed to get container status \"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c\": rpc error: code = NotFound desc = could not find container \"a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c\": container with ID starting with a5db5f36b006625b775a3d134da048c869e07f558348172f6a14c38b5d31488c not found: ID does not exist" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.864408 4875 scope.go:117] "RemoveContainer" containerID="4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11" Oct 07 08:54:13 crc kubenswrapper[4875]: E1007 08:54:13.864831 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11\": container with ID starting with 4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11 not found: ID does not exist" containerID="4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.864871 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11"} err="failed to get container status \"4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11\": rpc error: code = NotFound desc = could not find container \"4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11\": container with ID starting with 4a810c6cc885703400665c788b8ee5db103a3d12afa724231c72ae9f2df79d11 not found: ID does not exist" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.864912 4875 scope.go:117] "RemoveContainer" 
containerID="c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573" Oct 07 08:54:13 crc kubenswrapper[4875]: E1007 08:54:13.865323 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573\": container with ID starting with c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573 not found: ID does not exist" containerID="c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573" Oct 07 08:54:13 crc kubenswrapper[4875]: I1007 08:54:13.865361 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573"} err="failed to get container status \"c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573\": rpc error: code = NotFound desc = could not find container \"c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573\": container with ID starting with c70a47f9fa3ccfe9cdd42ec121a52f0870273aed41bfeb5574108fb51c57a573 not found: ID does not exist" Oct 07 08:54:15 crc kubenswrapper[4875]: I1007 08:54:15.745927 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" path="/var/lib/kubelet/pods/89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e/volumes" Oct 07 08:54:56 crc kubenswrapper[4875]: I1007 08:54:56.139496 4875 generic.go:334] "Generic (PLEG): container finished" podID="b9d21853-761a-4786-baa2-e0e00011a9d5" containerID="974a44a268306b4822a248a188aba8b617229741edb6e1a3b62c1fb497304a5d" exitCode=0 Oct 07 08:54:56 crc kubenswrapper[4875]: I1007 08:54:56.139605 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b9d21853-761a-4786-baa2-e0e00011a9d5","Type":"ContainerDied","Data":"974a44a268306b4822a248a188aba8b617229741edb6e1a3b62c1fb497304a5d"} Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.546012 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674114 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674162 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674215 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674249 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674274 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674341 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674362 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674387 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5ph7\" (UniqueName: \"kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.674570 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret\") pod \"b9d21853-761a-4786-baa2-e0e00011a9d5\" (UID: \"b9d21853-761a-4786-baa2-e0e00011a9d5\") " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.675600 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.675704 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data" (OuterVolumeSpecName: "config-data") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.679771 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "test-operator-logs") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.680906 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7" (OuterVolumeSpecName: "kube-api-access-n5ph7") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "kube-api-access-n5ph7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.683221 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.711949 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.714839 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.717467 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.738652 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "b9d21853-761a-4786-baa2-e0e00011a9d5" (UID: "b9d21853-761a-4786-baa2-e0e00011a9d5"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776689 4875 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776730 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776748 4875 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776759 4875 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b9d21853-761a-4786-baa2-e0e00011a9d5-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776787 4875 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776802 4875 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776814 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5ph7\" (UniqueName: \"kubernetes.io/projected/b9d21853-761a-4786-baa2-e0e00011a9d5-kube-api-access-n5ph7\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776823 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d21853-761a-4786-baa2-e0e00011a9d5-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.776951 4875 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b9d21853-761a-4786-baa2-e0e00011a9d5-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.822867 4875 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 07 08:54:57 crc kubenswrapper[4875]: I1007 08:54:57.878274 4875 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 07 08:54:58 crc kubenswrapper[4875]: I1007 08:54:58.157661 4875 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"b9d21853-761a-4786-baa2-e0e00011a9d5","Type":"ContainerDied","Data":"65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e"} Oct 07 08:54:58 crc kubenswrapper[4875]: I1007 08:54:58.157700 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65c597711a567ede7379db91788acff0151c7c51c005006f56dde18addb96d8e" Oct 07 08:54:58 crc kubenswrapper[4875]: I1007 08:54:58.158029 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 08:55:01 crc kubenswrapper[4875]: I1007 08:55:01.221453 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:55:01 crc kubenswrapper[4875]: I1007 08:55:01.221794 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.493426 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 08:55:10 crc kubenswrapper[4875]: E1007 08:55:10.494479 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d21853-761a-4786-baa2-e0e00011a9d5" containerName="tempest-tests-tempest-tests-runner" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494493 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d21853-761a-4786-baa2-e0e00011a9d5" containerName="tempest-tests-tempest-tests-runner" Oct 07 08:55:10 crc kubenswrapper[4875]: E1007 08:55:10.494512 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="extract-utilities" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494519 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="extract-utilities" Oct 07 08:55:10 crc kubenswrapper[4875]: E1007 08:55:10.494537 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="extract-content" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494543 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="extract-content" Oct 07 08:55:10 crc kubenswrapper[4875]: E1007 08:55:10.494564 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="registry-server" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494570 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" containerName="registry-server" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494745 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d21853-761a-4786-baa2-e0e00011a9d5" containerName="tempest-tests-tempest-tests-runner" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.494769 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="89eb30a6-99b0-4eaf-93c4-ac1fc58fec3e" 
containerName="registry-server" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.495466 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.497507 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jth75" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.502098 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.548597 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.549039 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kfnq\" (UniqueName: \"kubernetes.io/projected/f90c30e8-e60e-415e-b547-e02a254b8f24-kube-api-access-7kfnq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.650654 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.650848 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kfnq\" (UniqueName: \"kubernetes.io/projected/f90c30e8-e60e-415e-b547-e02a254b8f24-kube-api-access-7kfnq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.651822 4875 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.676019 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kfnq\" (UniqueName: \"kubernetes.io/projected/f90c30e8-e60e-415e-b547-e02a254b8f24-kube-api-access-7kfnq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.682037 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: 
\"f90c30e8-e60e-415e-b547-e02a254b8f24\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:10 crc kubenswrapper[4875]: I1007 08:55:10.836862 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 08:55:11 crc kubenswrapper[4875]: I1007 08:55:11.285663 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 08:55:12 crc kubenswrapper[4875]: I1007 08:55:12.289102 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f90c30e8-e60e-415e-b547-e02a254b8f24","Type":"ContainerStarted","Data":"7448e5ddf799766d71860f5d7b50a73f237a39f056b7e19f09d56eefd7cace8a"} Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.299036 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f90c30e8-e60e-415e-b547-e02a254b8f24","Type":"ContainerStarted","Data":"3f5eeedfd2b287f9ae7eb1a4cb8c7f1dcbc6f7d8fb4ad19e8aeca48b73259672"} Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.314729 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.421995397 podStartE2EDuration="3.314709992s" podCreationTimestamp="2025-10-07 08:55:10 +0000 UTC" firstStartedPulling="2025-10-07 08:55:11.290061422 +0000 UTC m=+3536.249831965" lastFinishedPulling="2025-10-07 08:55:12.182776017 +0000 UTC m=+3537.142546560" observedRunningTime="2025-10-07 08:55:13.310637772 +0000 UTC m=+3538.270408335" watchObservedRunningTime="2025-10-07 08:55:13.314709992 +0000 UTC m=+3538.274480535" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.491109 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.493662 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.502856 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.605154 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.605323 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlmft\" (UniqueName: \"kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.605415 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.707839 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlmft\" (UniqueName: \"kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.707948 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.708099 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.708523 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.708738 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.730910 4875 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zlmft\" (UniqueName: \"kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft\") pod \"community-operators-dpvdq\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:13 crc kubenswrapper[4875]: I1007 08:55:13.823033 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:14 crc kubenswrapper[4875]: I1007 08:55:14.406083 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:15 crc kubenswrapper[4875]: I1007 08:55:15.320800 4875 generic.go:334] "Generic (PLEG): container finished" podID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerID="794bd0103d0b428f2266f19ac183a370544e969974d8a724534e8f4f2048cfa8" exitCode=0 Oct 07 08:55:15 crc kubenswrapper[4875]: I1007 08:55:15.320955 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerDied","Data":"794bd0103d0b428f2266f19ac183a370544e969974d8a724534e8f4f2048cfa8"} Oct 07 08:55:15 crc kubenswrapper[4875]: I1007 08:55:15.321184 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerStarted","Data":"2da0fe8d37db513bd7d047eed5dff8e376bcf18fca67a70552df0b92e6bee400"} Oct 07 08:55:16 crc kubenswrapper[4875]: I1007 08:55:16.345757 4875 generic.go:334] "Generic (PLEG): container finished" podID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerID="6c042ed8665c262c1988127456e6929a5b5c96200736329eb58f4aff917d799f" exitCode=0 Oct 07 08:55:16 crc kubenswrapper[4875]: I1007 08:55:16.345799 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerDied","Data":"6c042ed8665c262c1988127456e6929a5b5c96200736329eb58f4aff917d799f"} Oct 07 08:55:17 crc kubenswrapper[4875]: I1007 08:55:17.362114 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerStarted","Data":"6ca57f15b39f02e9b4323dc0b85c6797235e28f026e1312a2b9e9cf238ce8025"} Oct 07 08:55:17 crc kubenswrapper[4875]: I1007 08:55:17.407823 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dpvdq" podStartSLOduration=2.893166631 podStartE2EDuration="4.407798063s" podCreationTimestamp="2025-10-07 08:55:13 +0000 UTC" firstStartedPulling="2025-10-07 08:55:15.323501145 +0000 UTC m=+3540.283271708" lastFinishedPulling="2025-10-07 08:55:16.838132597 +0000 UTC m=+3541.797903140" observedRunningTime="2025-10-07 08:55:17.390455689 +0000 UTC m=+3542.350226242" watchObservedRunningTime="2025-10-07 08:55:17.407798063 +0000 UTC m=+3542.367568626" Oct 07 08:55:23 crc kubenswrapper[4875]: I1007 08:55:23.823118 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:23 crc kubenswrapper[4875]: I1007 08:55:23.824458 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:23 crc kubenswrapper[4875]: I1007 08:55:23.875775 4875 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:24 crc kubenswrapper[4875]: I1007 08:55:24.496974 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:24 crc kubenswrapper[4875]: I1007 08:55:24.549911 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:26 crc kubenswrapper[4875]: I1007 08:55:26.471309 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dpvdq" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="registry-server" containerID="cri-o://6ca57f15b39f02e9b4323dc0b85c6797235e28f026e1312a2b9e9cf238ce8025" gracePeriod=2 Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.480798 4875 generic.go:334] "Generic (PLEG): container finished" podID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerID="6ca57f15b39f02e9b4323dc0b85c6797235e28f026e1312a2b9e9cf238ce8025" exitCode=0 Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.480997 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerDied","Data":"6ca57f15b39f02e9b4323dc0b85c6797235e28f026e1312a2b9e9cf238ce8025"} Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.636964 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.748711 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content\") pod \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.748979 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities\") pod \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.749003 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlmft\" (UniqueName: \"kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft\") pod \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\" (UID: \"5f2b4971-d2fe-4285-b93e-a798a6b623e0\") " Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.751145 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities" (OuterVolumeSpecName: "utilities") pod "5f2b4971-d2fe-4285-b93e-a798a6b623e0" (UID: "5f2b4971-d2fe-4285-b93e-a798a6b623e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.755161 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft" (OuterVolumeSpecName: "kube-api-access-zlmft") pod "5f2b4971-d2fe-4285-b93e-a798a6b623e0" (UID: "5f2b4971-d2fe-4285-b93e-a798a6b623e0"). InnerVolumeSpecName "kube-api-access-zlmft". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.798597 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f2b4971-d2fe-4285-b93e-a798a6b623e0" (UID: "5f2b4971-d2fe-4285-b93e-a798a6b623e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.851249 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.851300 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlmft\" (UniqueName: \"kubernetes.io/projected/5f2b4971-d2fe-4285-b93e-a798a6b623e0-kube-api-access-zlmft\") on node \"crc\" DevicePath \"\"" Oct 07 08:55:27 crc kubenswrapper[4875]: I1007 08:55:27.851315 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f2b4971-d2fe-4285-b93e-a798a6b623e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.491616 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpvdq" event={"ID":"5f2b4971-d2fe-4285-b93e-a798a6b623e0","Type":"ContainerDied","Data":"2da0fe8d37db513bd7d047eed5dff8e376bcf18fca67a70552df0b92e6bee400"} Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.492030 4875 scope.go:117] "RemoveContainer" containerID="6ca57f15b39f02e9b4323dc0b85c6797235e28f026e1312a2b9e9cf238ce8025" Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.491664 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpvdq" Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.516798 4875 scope.go:117] "RemoveContainer" containerID="6c042ed8665c262c1988127456e6929a5b5c96200736329eb58f4aff917d799f" Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.531745 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.541850 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dpvdq"] Oct 07 08:55:28 crc kubenswrapper[4875]: I1007 08:55:28.550471 4875 scope.go:117] "RemoveContainer" containerID="794bd0103d0b428f2266f19ac183a370544e969974d8a724534e8f4f2048cfa8" Oct 07 08:55:29 crc kubenswrapper[4875]: I1007 08:55:29.710058 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" path="/var/lib/kubelet/pods/5f2b4971-d2fe-4285-b93e-a798a6b623e0/volumes" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.324060 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbnxf/must-gather-hvc46"] Oct 07 08:55:30 crc kubenswrapper[4875]: E1007 08:55:30.324529 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="extract-utilities" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.324548 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="extract-utilities" Oct 07 08:55:30 crc kubenswrapper[4875]: E1007 08:55:30.324561 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="registry-server" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.324568 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="registry-server" Oct 07 08:55:30 crc kubenswrapper[4875]: E1007 08:55:30.324600 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="extract-content" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.324610 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="extract-content" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.324848 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2b4971-d2fe-4285-b93e-a798a6b623e0" containerName="registry-server" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.326246 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.328822 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wbnxf"/"default-dockercfg-h76d4" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.329086 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wbnxf"/"kube-root-ca.crt" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.329639 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wbnxf"/"openshift-service-ca.crt" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.337260 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wbnxf/must-gather-hvc46"] Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.500195 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8pqw\" (UniqueName: \"kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.500285 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.603247 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8pqw\" (UniqueName: \"kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.603414 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.604120 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.630206 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8pqw\" (UniqueName: \"kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw\") pod \"must-gather-hvc46\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:30 crc kubenswrapper[4875]: I1007 08:55:30.642384 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 08:55:31 crc kubenswrapper[4875]: I1007 08:55:31.160503 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wbnxf/must-gather-hvc46"] Oct 07 08:55:31 crc kubenswrapper[4875]: I1007 08:55:31.221385 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:55:31 crc kubenswrapper[4875]: I1007 08:55:31.221461 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:55:31 crc kubenswrapper[4875]: I1007 08:55:31.516819 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/must-gather-hvc46" event={"ID":"3dead823-f04a-4f8b-9221-f21b144333f5","Type":"ContainerStarted","Data":"56cfe3adf8aa6609759af53a3095859b52712d7459cec99059306720377774d0"} Oct 07 08:55:36 crc kubenswrapper[4875]: I1007 08:55:36.573127 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/must-gather-hvc46" event={"ID":"3dead823-f04a-4f8b-9221-f21b144333f5","Type":"ContainerStarted","Data":"7381e53bfa580a70403f0b8218d5936b3839c57c7c7eb4b63bc807f7e3da4a81"} Oct 07 08:55:36 crc kubenswrapper[4875]: I1007 08:55:36.574249 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/must-gather-hvc46" event={"ID":"3dead823-f04a-4f8b-9221-f21b144333f5","Type":"ContainerStarted","Data":"1d51c8c98d518d138d78e8d823a3b3c2c3e657324adc2405194914156a646b3b"} Oct 07 08:55:36 crc kubenswrapper[4875]: I1007 08:55:36.598828 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wbnxf/must-gather-hvc46" podStartSLOduration=2.391050388 podStartE2EDuration="6.598807687s" podCreationTimestamp="2025-10-07 08:55:30 +0000 UTC" firstStartedPulling="2025-10-07 08:55:31.168224898 +0000 UTC m=+3556.127995441" lastFinishedPulling="2025-10-07 08:55:35.375982197 +0000 UTC m=+3560.335752740" observedRunningTime="2025-10-07 08:55:36.593514318 +0000 UTC m=+3561.553284871" watchObservedRunningTime="2025-10-07 08:55:36.598807687 +0000 UTC m=+3561.558578230" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.533147 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-5rnxx"] Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.535523 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.589051 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhs2p\" (UniqueName: \"kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.589154 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.691720 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhs2p\" (UniqueName: \"kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.691814 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.692058 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.718385 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhs2p\" (UniqueName: \"kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p\") pod \"crc-debug-5rnxx\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: I1007 08:55:39.862782 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:55:39 crc kubenswrapper[4875]: W1007 08:55:39.913883 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5863358_3d8a_4837_babd_3b3c255d5452.slice/crio-7d9b0bb96ab73e5194915d68ad08da7555b74042b2e49260e5c0216978b307ca WatchSource:0}: Error finding container 7d9b0bb96ab73e5194915d68ad08da7555b74042b2e49260e5c0216978b307ca: Status 404 returned error can't find the container with id 7d9b0bb96ab73e5194915d68ad08da7555b74042b2e49260e5c0216978b307ca Oct 07 08:55:40 crc kubenswrapper[4875]: I1007 08:55:40.618741 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" event={"ID":"a5863358-3d8a-4837-babd-3b3c255d5452","Type":"ContainerStarted","Data":"7d9b0bb96ab73e5194915d68ad08da7555b74042b2e49260e5c0216978b307ca"} Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.659208 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.665708 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.671536 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.745762 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.746478 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.746503 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv5pj\" (UniqueName: \"kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.847497 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv5pj\" (UniqueName: \"kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.847594 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 
08:55:51.847716 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.848450 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.848547 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.870840 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv5pj\" (UniqueName: \"kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj\") pod \"redhat-marketplace-76nrq\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:51 crc kubenswrapper[4875]: I1007 08:55:51.990761 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.255804 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.740321 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" event={"ID":"a5863358-3d8a-4837-babd-3b3c255d5452","Type":"ContainerStarted","Data":"bb93a8191cb0b82d9cb1a1e926eaa0b23ea3347168bda4f60d42ad89d410acdf"} Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.742609 4875 generic.go:334] "Generic (PLEG): container finished" podID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerID="a265075e9c0a6514402907c44edde88abcdb0b8d33178422fdd293e9c0726ade" exitCode=0 Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.742650 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerDied","Data":"a265075e9c0a6514402907c44edde88abcdb0b8d33178422fdd293e9c0726ade"} Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.742681 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerStarted","Data":"c6c555093977bbe0e1c17520c31a474f3c71cdd1899b39d4faafbf211519a3c2"} Oct 07 08:55:52 crc kubenswrapper[4875]: I1007 08:55:52.755984 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" podStartSLOduration=2.223117984 podStartE2EDuration="13.755959278s" podCreationTimestamp="2025-10-07 08:55:39 +0000 UTC" firstStartedPulling="2025-10-07 08:55:39.919707727 +0000 UTC m=+3564.879478270" lastFinishedPulling="2025-10-07 08:55:51.452549021 +0000 UTC 
m=+3576.412319564" observedRunningTime="2025-10-07 08:55:52.752615212 +0000 UTC m=+3577.712385755" watchObservedRunningTime="2025-10-07 08:55:52.755959278 +0000 UTC m=+3577.715729821" Oct 07 08:55:54 crc kubenswrapper[4875]: I1007 08:55:54.776007 4875 generic.go:334] "Generic (PLEG): container finished" podID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerID="52e68739b9a130474c58bc2b3a92eae497adb1bda65852cc0d2fec0e0403ac28" exitCode=0 Oct 07 08:55:54 crc kubenswrapper[4875]: I1007 08:55:54.776147 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerDied","Data":"52e68739b9a130474c58bc2b3a92eae497adb1bda65852cc0d2fec0e0403ac28"} Oct 07 08:55:56 crc kubenswrapper[4875]: I1007 08:55:56.800999 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerStarted","Data":"670dc9ac1060964b28c80a31fa31ed70e17c880d6e0973794a8e22cf65d9f04a"} Oct 07 08:55:56 crc kubenswrapper[4875]: I1007 08:55:56.824073 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-76nrq" podStartSLOduration=2.901579311 podStartE2EDuration="5.8240452s" podCreationTimestamp="2025-10-07 08:55:51 +0000 UTC" firstStartedPulling="2025-10-07 08:55:52.74599984 +0000 UTC m=+3577.705770383" lastFinishedPulling="2025-10-07 08:55:55.668465729 +0000 UTC m=+3580.628236272" observedRunningTime="2025-10-07 08:55:56.817364317 +0000 UTC m=+3581.777134870" watchObservedRunningTime="2025-10-07 08:55:56.8240452 +0000 UTC m=+3581.783815743" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.221788 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.223122 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.223211 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.224208 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.224270 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" gracePeriod=600 Oct 07 08:56:01 crc kubenswrapper[4875]: E1007 
08:56:01.397046 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.848706 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" exitCode=0 Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.848749 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3"} Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.848782 4875 scope.go:117] "RemoveContainer" containerID="6ed59ba07fed92a048cc4ad9adbccd24b39d803c41d1de70c9f57e71db4886df" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.849643 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:56:01 crc kubenswrapper[4875]: E1007 08:56:01.850235 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.991064 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:01 crc kubenswrapper[4875]: I1007 08:56:01.991467 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:02 crc kubenswrapper[4875]: I1007 08:56:02.048188 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:02 crc kubenswrapper[4875]: I1007 08:56:02.926601 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:02 crc kubenswrapper[4875]: I1007 08:56:02.983750 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:56:04 crc kubenswrapper[4875]: I1007 08:56:04.888347 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-76nrq" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="registry-server" containerID="cri-o://670dc9ac1060964b28c80a31fa31ed70e17c880d6e0973794a8e22cf65d9f04a" gracePeriod=2 Oct 07 08:56:05 crc kubenswrapper[4875]: I1007 08:56:05.899356 4875 generic.go:334] "Generic (PLEG): container finished" podID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerID="670dc9ac1060964b28c80a31fa31ed70e17c880d6e0973794a8e22cf65d9f04a" exitCode=0 Oct 07 08:56:05 crc kubenswrapper[4875]: I1007 08:56:05.899830 4875 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerDied","Data":"670dc9ac1060964b28c80a31fa31ed70e17c880d6e0973794a8e22cf65d9f04a"} Oct 07 08:56:05 crc kubenswrapper[4875]: I1007 08:56:05.899866 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76nrq" event={"ID":"bb99ec26-051f-4047-a2cf-b125e8f961e8","Type":"ContainerDied","Data":"c6c555093977bbe0e1c17520c31a474f3c71cdd1899b39d4faafbf211519a3c2"} Oct 07 08:56:05 crc kubenswrapper[4875]: I1007 08:56:05.899931 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6c555093977bbe0e1c17520c31a474f3c71cdd1899b39d4faafbf211519a3c2" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.269118 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.399388 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content\") pod \"bb99ec26-051f-4047-a2cf-b125e8f961e8\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.399585 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv5pj\" (UniqueName: \"kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj\") pod \"bb99ec26-051f-4047-a2cf-b125e8f961e8\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.399659 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities\") pod \"bb99ec26-051f-4047-a2cf-b125e8f961e8\" (UID: \"bb99ec26-051f-4047-a2cf-b125e8f961e8\") " Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.401042 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities" (OuterVolumeSpecName: "utilities") pod "bb99ec26-051f-4047-a2cf-b125e8f961e8" (UID: "bb99ec26-051f-4047-a2cf-b125e8f961e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.410457 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj" (OuterVolumeSpecName: "kube-api-access-vv5pj") pod "bb99ec26-051f-4047-a2cf-b125e8f961e8" (UID: "bb99ec26-051f-4047-a2cf-b125e8f961e8"). InnerVolumeSpecName "kube-api-access-vv5pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.414978 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb99ec26-051f-4047-a2cf-b125e8f961e8" (UID: "bb99ec26-051f-4047-a2cf-b125e8f961e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.502103 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.502423 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv5pj\" (UniqueName: \"kubernetes.io/projected/bb99ec26-051f-4047-a2cf-b125e8f961e8-kube-api-access-vv5pj\") on node \"crc\" DevicePath \"\"" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.502436 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb99ec26-051f-4047-a2cf-b125e8f961e8-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.916739 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76nrq" Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.947373 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:56:07 crc kubenswrapper[4875]: I1007 08:56:07.956729 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-76nrq"] Oct 07 08:56:09 crc kubenswrapper[4875]: I1007 08:56:09.712108 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" path="/var/lib/kubelet/pods/bb99ec26-051f-4047-a2cf-b125e8f961e8/volumes" Oct 07 08:56:15 crc kubenswrapper[4875]: I1007 08:56:15.704674 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:56:15 crc kubenswrapper[4875]: E1007 08:56:15.705818 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:26 crc kubenswrapper[4875]: I1007 08:56:26.697744 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:56:26 crc kubenswrapper[4875]: E1007 08:56:26.698603 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:40 crc kubenswrapper[4875]: I1007 08:56:40.697570 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:56:40 crc kubenswrapper[4875]: E1007 08:56:40.698402 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:43 crc kubenswrapper[4875]: I1007 08:56:43.488358 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d454b8786-74tns_127bbfa5-9171-435f-99d2-069db85c4d67/barbican-api/0.log" Oct 07 08:56:43 crc kubenswrapper[4875]: I1007 08:56:43.560487 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d454b8786-74tns_127bbfa5-9171-435f-99d2-069db85c4d67/barbican-api-log/0.log" Oct 07 08:56:43 crc kubenswrapper[4875]: I1007 08:56:43.767900 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbb8f897d-wgrt2_5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e/barbican-keystone-listener/0.log" Oct 07 08:56:43 crc kubenswrapper[4875]: I1007 08:56:43.840818 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbb8f897d-wgrt2_5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e/barbican-keystone-listener-log/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.023669 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-576fbf56f5-4mtlh_3021a32f-0a9f-4dea-8da2-2ae1df754ccc/barbican-worker/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.085932 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-576fbf56f5-4mtlh_3021a32f-0a9f-4dea-8da2-2ae1df754ccc/barbican-worker-log/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.440841 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4_15ba8b2a-ed31-47c5-b655-efb44ceb0134/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.723788 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/ceilometer-notification-agent/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.762058 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/ceilometer-central-agent/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.792391 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/proxy-httpd/0.log" Oct 07 08:56:44 crc kubenswrapper[4875]: I1007 08:56:44.989620 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/sg-core/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.040494 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3a93ed97-4ce8-45f0-b81c-52e3613ea189/cinder-api/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.220722 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3a93ed97-4ce8-45f0-b81c-52e3613ea189/cinder-api-log/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.313148 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_63b2e079-16c2-4f61-8ea4-a0fd50150f03/cinder-scheduler/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.488232 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-scheduler-0_63b2e079-16c2-4f61-8ea4-a0fd50150f03/probe/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.539028 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h_7af841b2-2a3c-4cea-a5b1-5f854609190b/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.767991 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq_965bc704-2251-46a2-b947-05d835da9ea9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:45 crc kubenswrapper[4875]: I1007 08:56:45.994967 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx_063ffc6e-9bc6-41ea-9d6d-d73e3923c92a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.067972 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/init/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.365782 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/init/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.374596 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/dnsmasq-dns/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.631712 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9d4dbc12-0c00-4b2a-ad57-055d19cebf0a/glance-httpd/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.641463 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-f424w_7b638466-3fdb-4290-8a73-9f3d018a8ee0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.888224 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9d4dbc12-0c00-4b2a-ad57-055d19cebf0a/glance-log/0.log" Oct 07 08:56:46 crc kubenswrapper[4875]: I1007 08:56:46.910865 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_95d6a4f6-2e39-4b75-aadf-ca829e1e9911/glance-httpd/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.080531 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_95d6a4f6-2e39-4b75-aadf-ca829e1e9911/glance-log/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.265814 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64854f4c8-d67s8_cfb37cba-9925-4808-9b9f-6dfd2550c15e/horizon/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.502462 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-t8wql_f7030085-e862-4c57-9c9e-29e88006533e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.512843 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64854f4c8-d67s8_cfb37cba-9925-4808-9b9f-6dfd2550c15e/horizon-log/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: 
I1007 08:56:47.613527 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-jwr5d_489def06-7200-4a3d-9d81-a811bac28712/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.839266 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_cd2084f6-1596-45c2-a4ba-1cbd7a1ca565/kube-state-metrics/0.log" Oct 07 08:56:47 crc kubenswrapper[4875]: I1007 08:56:47.918264 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-754566d8d4-jzmbw_bcdbebc0-2911-4723-9a61-718037d0d1dc/keystone-api/0.log" Oct 07 08:56:48 crc kubenswrapper[4875]: I1007 08:56:48.073697 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-69n9w_3ec6c99f-4455-40ea-8a27-bf56298f3e17/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:48 crc kubenswrapper[4875]: I1007 08:56:48.467252 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f5f877689-scxcf_a42b578e-6f65-40c9-94f4-2b843c305470/neutron-httpd/0.log" Oct 07 08:56:48 crc kubenswrapper[4875]: I1007 08:56:48.474011 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f5f877689-scxcf_a42b578e-6f65-40c9-94f4-2b843c305470/neutron-api/0.log" Oct 07 08:56:48 crc kubenswrapper[4875]: I1007 08:56:48.731937 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz_c6df5d2d-85d8-4d79-b8a2-f3b6f7060019/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:49 crc kubenswrapper[4875]: I1007 08:56:49.375339 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3774daf2-c509-494e-81f3-9cadf5a30459/nova-api-log/0.log" Oct 07 08:56:49 crc kubenswrapper[4875]: I1007 08:56:49.473605 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_98872838-7b40-4854-97c4-edddb9a931a5/nova-cell0-conductor-conductor/0.log" Oct 07 08:56:49 crc kubenswrapper[4875]: I1007 08:56:49.571866 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3774daf2-c509-494e-81f3-9cadf5a30459/nova-api-api/0.log" Oct 07 08:56:49 crc kubenswrapper[4875]: I1007 08:56:49.874939 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_eb928cc7-3310-4d3e-929c-d470f8a8f8bb/nova-cell1-conductor-conductor/0.log" Oct 07 08:56:49 crc kubenswrapper[4875]: I1007 08:56:49.972044 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_e35464b6-e9ab-4262-a99d-efad04dbd0e0/nova-cell1-novncproxy-novncproxy/0.log" Oct 07 08:56:50 crc kubenswrapper[4875]: I1007 08:56:50.201050 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-kmx8n_1e54ad8c-e627-4e27-90d1-ea193eb2f42f/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:50 crc kubenswrapper[4875]: I1007 08:56:50.419225 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_47ec1bb6-1d05-4ed7-b706-6d25c4146e7d/nova-metadata-log/0.log" Oct 07 08:56:50 crc kubenswrapper[4875]: I1007 08:56:50.923727 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d49ca1ef-7891-416e-9d67-d17b4b031624/nova-scheduler-scheduler/0.log" Oct 07 08:56:50 crc kubenswrapper[4875]: 
I1007 08:56:50.998204 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/mysql-bootstrap/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.202227 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/mysql-bootstrap/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.252886 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/galera/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.566726 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/mysql-bootstrap/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.789126 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/mysql-bootstrap/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.852736 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/galera/0.log" Oct 07 08:56:51 crc kubenswrapper[4875]: I1007 08:56:51.901025 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_47ec1bb6-1d05-4ed7-b706-6d25c4146e7d/nova-metadata-metadata/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.120006 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c25e751b-f41b-4571-92e4-81d1b263ed48/openstackclient/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.330687 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-6rsbk_e984b1d0-f011-46a5-8339-966f44e3c603/openstack-network-exporter/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.493807 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-npwcn_97a62f1e-e3e0-4592-82a8-2524ba6df291/ovn-controller/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.654652 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server-init/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.894275 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server-init/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.965856 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server/0.log" Oct 07 08:56:52 crc kubenswrapper[4875]: I1007 08:56:52.998420 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovs-vswitchd/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.244505 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-mrzjf_97901af5-a883-4d4f-acd8-9425772903a9/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.420450 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_84eec933-bbc6-4961-940a-1a26f31d2fd3/openstack-network-exporter/0.log" Oct 07 08:56:53 crc 
kubenswrapper[4875]: I1007 08:56:53.497977 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_84eec933-bbc6-4961-940a-1a26f31d2fd3/ovn-northd/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.669656 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e845f7e2-55f1-445d-8155-8a92bc2ee519/openstack-network-exporter/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.737308 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e845f7e2-55f1-445d-8155-8a92bc2ee519/ovsdbserver-nb/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.970285 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_021914db-05c8-4ae9-a24e-dee6bfabff00/openstack-network-exporter/0.log" Oct 07 08:56:53 crc kubenswrapper[4875]: I1007 08:56:53.992644 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_021914db-05c8-4ae9-a24e-dee6bfabff00/ovsdbserver-sb/0.log" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.297913 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-548b9747d4-bkwjt_ba464fba-e931-4f8a-be56-6b5456e1572d/placement-api/0.log" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.352431 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-548b9747d4-bkwjt_ba464fba-e931-4f8a-be56-6b5456e1572d/placement-log/0.log" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.537492 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/setup-container/0.log" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.697958 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:56:54 crc kubenswrapper[4875]: E1007 08:56:54.698260 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.818170 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/setup-container/0.log" Oct 07 08:56:54 crc kubenswrapper[4875]: I1007 08:56:54.846507 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/rabbitmq/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.045245 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/setup-container/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.250430 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/setup-container/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.363827 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/rabbitmq/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.531328 4875 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5_f21455d3-51d3-464a-acd5-d707dfa2ee70/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.695847 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-mztgk_19285527-95a7-43c0-9366-3d8895c09835/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:55 crc kubenswrapper[4875]: I1007 08:56:55.868357 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4_23730b76-30ac-47bf-8043-3c713a209e1e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.106068 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-zlwhl_3910d9af-0583-40ef-887d-e73ddf795725/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.197240 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-prbmz_dfa58f34-fade-4504-8329-a9b13eb13726/ssh-known-hosts-edpm-deployment/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.437503 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-88cf5ccd5-wf5cp_74566d25-db70-4528-b9c4-89b32863c2eb/proxy-server/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.502785 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-88cf5ccd5-wf5cp_74566d25-db70-4528-b9c4-89b32863c2eb/proxy-httpd/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.628744 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-7bh99_e4ed1841-ffa8-4d3b-8a66-43221118d007/swift-ring-rebalance/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.866217 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-auditor/0.log" Oct 07 08:56:56 crc kubenswrapper[4875]: I1007 08:56:56.866264 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-reaper/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.071535 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-server/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.089247 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-replicator/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.102467 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-auditor/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.358301 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-server/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.386379 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-replicator/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.403309 4875 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-updater/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.610087 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-auditor/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.612019 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-expirer/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.652244 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-replicator/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.791335 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-server/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.858469 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-updater/0.log" Oct 07 08:56:57 crc kubenswrapper[4875]: I1007 08:56:57.887463 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/rsync/0.log" Oct 07 08:56:58 crc kubenswrapper[4875]: I1007 08:56:58.115123 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/swift-recon-cron/0.log" Oct 07 08:56:58 crc kubenswrapper[4875]: I1007 08:56:58.195790 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-bm42x_b06f38f5-d4e4-4de8-aab3-f171fc82d880/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:56:58 crc kubenswrapper[4875]: I1007 08:56:58.394012 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_b9d21853-761a-4786-baa2-e0e00011a9d5/tempest-tests-tempest-tests-runner/0.log" Oct 07 08:56:58 crc kubenswrapper[4875]: I1007 08:56:58.641724 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f90c30e8-e60e-415e-b547-e02a254b8f24/test-operator-logs-container/0.log" Oct 07 08:56:58 crc kubenswrapper[4875]: I1007 08:56:58.685151 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5_7182b81e-0002-4025-ab76-31844db2d768/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 08:57:05 crc kubenswrapper[4875]: I1007 08:57:05.235224 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_4cc2ad0d-e745-4a93-a5fb-d9f891bc3474/memcached/0.log" Oct 07 08:57:07 crc kubenswrapper[4875]: I1007 08:57:07.698695 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:57:07 crc kubenswrapper[4875]: E1007 08:57:07.699942 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 
08:57:22 crc kubenswrapper[4875]: I1007 08:57:22.698530 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:57:22 crc kubenswrapper[4875]: E1007 08:57:22.699421 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:57:37 crc kubenswrapper[4875]: I1007 08:57:37.700596 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:57:37 crc kubenswrapper[4875]: E1007 08:57:37.702860 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:57:50 crc kubenswrapper[4875]: I1007 08:57:50.697777 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:57:50 crc kubenswrapper[4875]: E1007 08:57:50.699002 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:01 crc kubenswrapper[4875]: I1007 08:58:01.031764 4875 generic.go:334] "Generic (PLEG): container finished" podID="a5863358-3d8a-4837-babd-3b3c255d5452" containerID="bb93a8191cb0b82d9cb1a1e926eaa0b23ea3347168bda4f60d42ad89d410acdf" exitCode=0 Oct 07 08:58:01 crc kubenswrapper[4875]: I1007 08:58:01.031904 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" event={"ID":"a5863358-3d8a-4837-babd-3b3c255d5452","Type":"ContainerDied","Data":"bb93a8191cb0b82d9cb1a1e926eaa0b23ea3347168bda4f60d42ad89d410acdf"} Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.154830 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.195570 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-5rnxx"] Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.205582 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-5rnxx"] Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.228106 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host\") pod \"a5863358-3d8a-4837-babd-3b3c255d5452\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.228222 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhs2p\" (UniqueName: \"kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p\") pod \"a5863358-3d8a-4837-babd-3b3c255d5452\" (UID: \"a5863358-3d8a-4837-babd-3b3c255d5452\") " Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.228231 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host" (OuterVolumeSpecName: "host") pod "a5863358-3d8a-4837-babd-3b3c255d5452" (UID: "a5863358-3d8a-4837-babd-3b3c255d5452"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.228988 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5863358-3d8a-4837-babd-3b3c255d5452-host\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.234815 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p" (OuterVolumeSpecName: "kube-api-access-fhs2p") pod "a5863358-3d8a-4837-babd-3b3c255d5452" (UID: "a5863358-3d8a-4837-babd-3b3c255d5452"). InnerVolumeSpecName "kube-api-access-fhs2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.330394 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhs2p\" (UniqueName: \"kubernetes.io/projected/a5863358-3d8a-4837-babd-3b3c255d5452-kube-api-access-fhs2p\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:02 crc kubenswrapper[4875]: I1007 08:58:02.697373 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:58:02 crc kubenswrapper[4875]: E1007 08:58:02.698219 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.054691 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d9b0bb96ab73e5194915d68ad08da7555b74042b2e49260e5c0216978b307ca" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.054779 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-5rnxx" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.375544 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-vnz7x"] Oct 07 08:58:03 crc kubenswrapper[4875]: E1007 08:58:03.376314 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="registry-server" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376332 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="registry-server" Oct 07 08:58:03 crc kubenswrapper[4875]: E1007 08:58:03.376357 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="extract-content" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376364 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="extract-content" Oct 07 08:58:03 crc kubenswrapper[4875]: E1007 08:58:03.376377 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5863358-3d8a-4837-babd-3b3c255d5452" containerName="container-00" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376383 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5863358-3d8a-4837-babd-3b3c255d5452" containerName="container-00" Oct 07 08:58:03 crc kubenswrapper[4875]: E1007 08:58:03.376410 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="extract-utilities" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376416 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="extract-utilities" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376652 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5863358-3d8a-4837-babd-3b3c255d5452" containerName="container-00" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.376674 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb99ec26-051f-4047-a2cf-b125e8f961e8" containerName="registry-server" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.377429 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.450319 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.450389 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbxgk\" (UniqueName: \"kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.553328 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.553422 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbxgk\" (UniqueName: \"kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.553673 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.578759 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbxgk\" (UniqueName: \"kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk\") pod \"crc-debug-vnz7x\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.705063 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:03 crc kubenswrapper[4875]: I1007 08:58:03.710439 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5863358-3d8a-4837-babd-3b3c255d5452" path="/var/lib/kubelet/pods/a5863358-3d8a-4837-babd-3b3c255d5452/volumes" Oct 07 08:58:04 crc kubenswrapper[4875]: I1007 08:58:04.064908 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" event={"ID":"137e7dd4-3756-4b75-a17e-442834a956b2","Type":"ContainerStarted","Data":"50b86ce614a1e876314892fc17942263e38b116263704dd9452e53d3b78caf35"} Oct 07 08:58:04 crc kubenswrapper[4875]: I1007 08:58:04.064968 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" event={"ID":"137e7dd4-3756-4b75-a17e-442834a956b2","Type":"ContainerStarted","Data":"bb74293d7ed33a66165f2b84e9335c4a63903d9bbaba77908b126981c0c1ee08"} Oct 07 08:58:04 crc kubenswrapper[4875]: I1007 08:58:04.082781 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" podStartSLOduration=1.082761115 podStartE2EDuration="1.082761115s" podCreationTimestamp="2025-10-07 08:58:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 08:58:04.079852842 +0000 UTC m=+3709.039623405" watchObservedRunningTime="2025-10-07 08:58:04.082761115 +0000 UTC m=+3709.042531658" Oct 07 08:58:05 crc kubenswrapper[4875]: I1007 08:58:05.080859 4875 generic.go:334] "Generic (PLEG): container finished" podID="137e7dd4-3756-4b75-a17e-442834a956b2" containerID="50b86ce614a1e876314892fc17942263e38b116263704dd9452e53d3b78caf35" exitCode=0 Oct 07 08:58:05 crc kubenswrapper[4875]: I1007 08:58:05.080918 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" event={"ID":"137e7dd4-3756-4b75-a17e-442834a956b2","Type":"ContainerDied","Data":"50b86ce614a1e876314892fc17942263e38b116263704dd9452e53d3b78caf35"} Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.266382 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.399002 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fpwf5"] Oct 07 08:58:06 crc kubenswrapper[4875]: E1007 08:58:06.399763 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137e7dd4-3756-4b75-a17e-442834a956b2" containerName="container-00" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.399781 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="137e7dd4-3756-4b75-a17e-442834a956b2" containerName="container-00" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.400078 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="137e7dd4-3756-4b75-a17e-442834a956b2" containerName="container-00" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.404254 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.410818 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fpwf5"] Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.413223 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host\") pod \"137e7dd4-3756-4b75-a17e-442834a956b2\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.413393 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbxgk\" (UniqueName: \"kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk\") pod \"137e7dd4-3756-4b75-a17e-442834a956b2\" (UID: \"137e7dd4-3756-4b75-a17e-442834a956b2\") " Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.413904 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host" (OuterVolumeSpecName: "host") pod "137e7dd4-3756-4b75-a17e-442834a956b2" (UID: "137e7dd4-3756-4b75-a17e-442834a956b2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.426606 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk" (OuterVolumeSpecName: "kube-api-access-hbxgk") pod "137e7dd4-3756-4b75-a17e-442834a956b2" (UID: "137e7dd4-3756-4b75-a17e-442834a956b2"). InnerVolumeSpecName "kube-api-access-hbxgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.515329 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w4zm\" (UniqueName: \"kubernetes.io/projected/92781bfa-9f68-4be9-a393-9a40fa2b4e52-kube-api-access-6w4zm\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.516355 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-catalog-content\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.517853 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-utilities\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.518597 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbxgk\" (UniqueName: \"kubernetes.io/projected/137e7dd4-3756-4b75-a17e-442834a956b2-kube-api-access-hbxgk\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.518936 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/137e7dd4-3756-4b75-a17e-442834a956b2-host\") on 
node \"crc\" DevicePath \"\"" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.621077 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-utilities\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.621316 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w4zm\" (UniqueName: \"kubernetes.io/projected/92781bfa-9f68-4be9-a393-9a40fa2b4e52-kube-api-access-6w4zm\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.621362 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-catalog-content\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.622024 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-utilities\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.622766 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92781bfa-9f68-4be9-a393-9a40fa2b4e52-catalog-content\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.647929 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w4zm\" (UniqueName: \"kubernetes.io/projected/92781bfa-9f68-4be9-a393-9a40fa2b4e52-kube-api-access-6w4zm\") pod \"redhat-operators-fpwf5\" (UID: \"92781bfa-9f68-4be9-a393-9a40fa2b4e52\") " pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:06 crc kubenswrapper[4875]: I1007 08:58:06.773799 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:07 crc kubenswrapper[4875]: I1007 08:58:07.117757 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" event={"ID":"137e7dd4-3756-4b75-a17e-442834a956b2","Type":"ContainerDied","Data":"bb74293d7ed33a66165f2b84e9335c4a63903d9bbaba77908b126981c0c1ee08"} Oct 07 08:58:07 crc kubenswrapper[4875]: I1007 08:58:07.118237 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb74293d7ed33a66165f2b84e9335c4a63903d9bbaba77908b126981c0c1ee08" Oct 07 08:58:07 crc kubenswrapper[4875]: I1007 08:58:07.118027 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-vnz7x" Oct 07 08:58:07 crc kubenswrapper[4875]: I1007 08:58:07.333574 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fpwf5"] Oct 07 08:58:08 crc kubenswrapper[4875]: I1007 08:58:08.130232 4875 generic.go:334] "Generic (PLEG): container finished" podID="92781bfa-9f68-4be9-a393-9a40fa2b4e52" containerID="645aa4c3740182e2f4d8d178192e6ecf7c1ae6b9b182f3bb91631961c5c8401b" exitCode=0 Oct 07 08:58:08 crc kubenswrapper[4875]: I1007 08:58:08.130298 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpwf5" event={"ID":"92781bfa-9f68-4be9-a393-9a40fa2b4e52","Type":"ContainerDied","Data":"645aa4c3740182e2f4d8d178192e6ecf7c1ae6b9b182f3bb91631961c5c8401b"} Oct 07 08:58:08 crc kubenswrapper[4875]: I1007 08:58:08.130641 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpwf5" event={"ID":"92781bfa-9f68-4be9-a393-9a40fa2b4e52","Type":"ContainerStarted","Data":"91a5636ea1fcc093ce058e9f827465df3951dcf07189cba5117e6893d182f475"} Oct 07 08:58:12 crc kubenswrapper[4875]: I1007 08:58:12.422309 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-vnz7x"] Oct 07 08:58:12 crc kubenswrapper[4875]: I1007 08:58:12.431739 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-vnz7x"] Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.595570 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-jjv8v"] Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.597638 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.695827 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host\") pod \"crc-debug-jjv8v\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.696126 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtp2w\" (UniqueName: \"kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w\") pod \"crc-debug-jjv8v\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.709279 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="137e7dd4-3756-4b75-a17e-442834a956b2" path="/var/lib/kubelet/pods/137e7dd4-3756-4b75-a17e-442834a956b2/volumes" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.798807 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtp2w\" (UniqueName: \"kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w\") pod \"crc-debug-jjv8v\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.799032 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host\") pod \"crc-debug-jjv8v\" (UID: 
\"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.802610 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host\") pod \"crc-debug-jjv8v\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.822154 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtp2w\" (UniqueName: \"kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w\") pod \"crc-debug-jjv8v\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:13 crc kubenswrapper[4875]: I1007 08:58:13.918080 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:16 crc kubenswrapper[4875]: I1007 08:58:16.698104 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:58:16 crc kubenswrapper[4875]: E1007 08:58:16.698435 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:22 crc kubenswrapper[4875]: I1007 08:58:22.273907 4875 generic.go:334] "Generic (PLEG): container finished" podID="3bbeaa56-147d-4e2c-b10d-68341c9e41ba" containerID="94d259f1090613e03fbdea91e4356faae49517d29e90cf3757f2ced791ee7f36" exitCode=0 Oct 07 08:58:22 crc kubenswrapper[4875]: I1007 08:58:22.274063 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" event={"ID":"3bbeaa56-147d-4e2c-b10d-68341c9e41ba","Type":"ContainerDied","Data":"94d259f1090613e03fbdea91e4356faae49517d29e90cf3757f2ced791ee7f36"} Oct 07 08:58:22 crc kubenswrapper[4875]: I1007 08:58:22.274623 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" event={"ID":"3bbeaa56-147d-4e2c-b10d-68341c9e41ba","Type":"ContainerStarted","Data":"2d21107b9aa9ba3c38b3a5259d08ad4e68289cee2ecaffd4b6f5e4984722acf4"} Oct 07 08:58:22 crc kubenswrapper[4875]: I1007 08:58:22.276863 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpwf5" event={"ID":"92781bfa-9f68-4be9-a393-9a40fa2b4e52","Type":"ContainerStarted","Data":"5d63a2b8ab8e50288d3f7ebc1fb633dfeefc985f51d97585379f540005cb4296"} Oct 07 08:58:23 crc kubenswrapper[4875]: I1007 08:58:23.286944 4875 generic.go:334] "Generic (PLEG): container finished" podID="92781bfa-9f68-4be9-a393-9a40fa2b4e52" containerID="5d63a2b8ab8e50288d3f7ebc1fb633dfeefc985f51d97585379f540005cb4296" exitCode=0 Oct 07 08:58:23 crc kubenswrapper[4875]: I1007 08:58:23.287035 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpwf5" event={"ID":"92781bfa-9f68-4be9-a393-9a40fa2b4e52","Type":"ContainerDied","Data":"5d63a2b8ab8e50288d3f7ebc1fb633dfeefc985f51d97585379f540005cb4296"} Oct 07 08:58:24 crc kubenswrapper[4875]: I1007 08:58:24.338433 4875 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-jjv8v"] Oct 07 08:58:24 crc kubenswrapper[4875]: I1007 08:58:24.346902 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbnxf/crc-debug-jjv8v"] Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.312920 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpwf5" event={"ID":"92781bfa-9f68-4be9-a393-9a40fa2b4e52","Type":"ContainerStarted","Data":"d1f86ed7720a1f7ca6b7dbf80b4a5dde8f26b53bcdeff62356353ee74044ce69"} Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.333853 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fpwf5" podStartSLOduration=2.882882824 podStartE2EDuration="19.333836972s" podCreationTimestamp="2025-10-07 08:58:06 +0000 UTC" firstStartedPulling="2025-10-07 08:58:08.132690156 +0000 UTC m=+3713.092460699" lastFinishedPulling="2025-10-07 08:58:24.583644304 +0000 UTC m=+3729.543414847" observedRunningTime="2025-10-07 08:58:25.332378776 +0000 UTC m=+3730.292149319" watchObservedRunningTime="2025-10-07 08:58:25.333836972 +0000 UTC m=+3730.293607515" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.425280 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.539438 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtp2w\" (UniqueName: \"kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w\") pod \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.539511 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host\") pod \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\" (UID: \"3bbeaa56-147d-4e2c-b10d-68341c9e41ba\") " Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.540097 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host" (OuterVolumeSpecName: "host") pod "3bbeaa56-147d-4e2c-b10d-68341c9e41ba" (UID: "3bbeaa56-147d-4e2c-b10d-68341c9e41ba"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.540450 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-host\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.546779 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w" (OuterVolumeSpecName: "kube-api-access-rtp2w") pod "3bbeaa56-147d-4e2c-b10d-68341c9e41ba" (UID: "3bbeaa56-147d-4e2c-b10d-68341c9e41ba"). InnerVolumeSpecName "kube-api-access-rtp2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.642496 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtp2w\" (UniqueName: \"kubernetes.io/projected/3bbeaa56-147d-4e2c-b10d-68341c9e41ba-kube-api-access-rtp2w\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:25 crc kubenswrapper[4875]: I1007 08:58:25.709545 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bbeaa56-147d-4e2c-b10d-68341c9e41ba" path="/var/lib/kubelet/pods/3bbeaa56-147d-4e2c-b10d-68341c9e41ba/volumes" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.021414 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.258727 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.286957 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.296254 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.322277 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/crc-debug-jjv8v" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.322307 4875 scope.go:117] "RemoveContainer" containerID="94d259f1090613e03fbdea91e4356faae49517d29e90cf3757f2ced791ee7f36" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.515907 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.527741 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.554174 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/extract/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.774054 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.774321 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.789698 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-tslhs_cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5/kube-rbac-proxy/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.789796 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-tslhs_cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5/manager/0.log" Oct 07 08:58:26 crc kubenswrapper[4875]: I1007 08:58:26.801326 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-8chkp_ecbab85b-669d-4669-aa95-597dc630b7e6/kube-rbac-proxy/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.006135 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-n9gbs_0ef97224-fe40-43a3-af95-f4e1986b8fbe/kube-rbac-proxy/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.044537 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-8chkp_ecbab85b-669d-4669-aa95-597dc630b7e6/manager/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.059582 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-n9gbs_0ef97224-fe40-43a3-af95-f4e1986b8fbe/manager/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.263929 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-zpbdj_b9617ed2-0ac9-45b8-8089-1091ff8937dd/kube-rbac-proxy/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.383638 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-zpbdj_b9617ed2-0ac9-45b8-8089-1091ff8937dd/manager/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.551908 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-t97rc_0c4de86e-6c46-4292-8c13-faeff0997ac4/manager/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.558205 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-t97rc_0c4de86e-6c46-4292-8c13-faeff0997ac4/kube-rbac-proxy/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.717701 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-g6cn6_52379554-390c-4cb2-97ae-0cb0596f36d1/kube-rbac-proxy/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.833664 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fpwf5" podUID="92781bfa-9f68-4be9-a393-9a40fa2b4e52" containerName="registry-server" probeResult="failure" output=< Oct 07 08:58:27 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 08:58:27 crc kubenswrapper[4875]: > Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.833676 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-g6cn6_52379554-390c-4cb2-97ae-0cb0596f36d1/manager/0.log" Oct 07 08:58:27 crc kubenswrapper[4875]: I1007 08:58:27.924634 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-l77jd_0cce3a76-3617-40be-8d2b-b8f9184e6b61/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.105109 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-6vp52_f03c0528-11ac-4b93-8f46-4415192ba694/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.130766 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-l77jd_0cce3a76-3617-40be-8d2b-b8f9184e6b61/manager/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.160792 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-6vp52_f03c0528-11ac-4b93-8f46-4415192ba694/manager/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.370495 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-lvqtd_ab712745-89ea-43bb-b2d7-7192d3691acf/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.379505 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-lvqtd_ab712745-89ea-43bb-b2d7-7192d3691acf/manager/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.565122 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-zwhfp_45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.590379 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-zwhfp_45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8/manager/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.671509 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j_d9b65818-b46c-4a3d-8ed2-53d04e3dc834/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.697659 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:58:28 crc kubenswrapper[4875]: E1007 08:58:28.697988 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.802139 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j_d9b65818-b46c-4a3d-8ed2-53d04e3dc834/manager/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.898188 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-jcb62_04963c19-bc7b-41c9-8b48-e3b8653738a8/kube-rbac-proxy/0.log" Oct 07 08:58:28 crc kubenswrapper[4875]: I1007 08:58:28.957259 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-jcb62_04963c19-bc7b-41c9-8b48-e3b8653738a8/manager/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.071733 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-2s4ks_7b5bb2f7-a322-4af4-81dc-d9b104b2bb85/kube-rbac-proxy/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.266200 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-wr55g_d848105d-5f25-435d-bc92-fc6f9eac9749/kube-rbac-proxy/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.270308 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-2s4ks_7b5bb2f7-a322-4af4-81dc-d9b104b2bb85/manager/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.294456 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-wr55g_d848105d-5f25-435d-bc92-fc6f9eac9749/manager/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.488811 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t_052c6076-1098-41ac-a80d-11879a2e08bc/kube-rbac-proxy/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.510428 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t_052c6076-1098-41ac-a80d-11879a2e08bc/manager/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.732258 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667f8c4c67-cltql_1f9b6e86-8f57-4622-bc91-b062f04ec29f/kube-rbac-proxy/0.log" Oct 07 08:58:29 crc kubenswrapper[4875]: I1007 08:58:29.858103 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fb44c8bf6-4qlwf_4180c049-4ede-4df6-929d-4a3250404f38/kube-rbac-proxy/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.118349 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-l2g5l_a7934996-8cb9-4f65-896e-c9755d8b5712/registry-server/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.118608 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fb44c8bf6-4qlwf_4180c049-4ede-4df6-929d-4a3250404f38/operator/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.331229 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-dtxrv_f5fb651c-9cca-4f7c-9136-37534358a8dd/kube-rbac-proxy/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.425242 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-dtxrv_f5fb651c-9cca-4f7c-9136-37534358a8dd/manager/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.462233 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-8b99x_9e09310b-4437-4e30-881f-ed2dd568aa16/kube-rbac-proxy/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.588430 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-8b99x_9e09310b-4437-4e30-881f-ed2dd568aa16/manager/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.680543 4875 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-rljtx_1b1c1a64-b0a5-4c2f-a43d-7cde7774094a/operator/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.853917 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-gfr8f_91189044-d565-4a5e-9766-1bd11f300f11/kube-rbac-proxy/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.857680 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667f8c4c67-cltql_1f9b6e86-8f57-4622-bc91-b062f04ec29f/manager/0.log" Oct 07 08:58:30 crc kubenswrapper[4875]: I1007 08:58:30.967118 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-gfr8f_91189044-d565-4a5e-9766-1bd11f300f11/manager/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.005146 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-z98x2_4aab0800-b79f-42f6-8d27-ce34e631f086/kube-rbac-proxy/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.117733 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-z98x2_4aab0800-b79f-42f6-8d27-ce34e631f086/manager/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.154045 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-l8gnr_c57705e3-3b4e-4252-8c8d-0a21084ff5d8/kube-rbac-proxy/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.168685 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-l8gnr_c57705e3-3b4e-4252-8c8d-0a21084ff5d8/manager/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.323618 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-9njzw_d4352aef-6eec-4342-8b1f-67a0bf3459f2/kube-rbac-proxy/0.log" Oct 07 08:58:31 crc kubenswrapper[4875]: I1007 08:58:31.346377 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-9njzw_d4352aef-6eec-4342-8b1f-67a0bf3459f2/manager/0.log" Oct 07 08:58:37 crc kubenswrapper[4875]: I1007 08:58:37.825629 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fpwf5" podUID="92781bfa-9f68-4be9-a393-9a40fa2b4e52" containerName="registry-server" probeResult="failure" output=< Oct 07 08:58:37 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 08:58:37 crc kubenswrapper[4875]: > Oct 07 08:58:40 crc kubenswrapper[4875]: I1007 08:58:40.697576 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:58:40 crc kubenswrapper[4875]: E1007 08:58:40.698494 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" 
podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:47 crc kubenswrapper[4875]: I1007 08:58:47.883408 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fpwf5" podUID="92781bfa-9f68-4be9-a393-9a40fa2b4e52" containerName="registry-server" probeResult="failure" output=< Oct 07 08:58:47 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 08:58:47 crc kubenswrapper[4875]: > Oct 07 08:58:47 crc kubenswrapper[4875]: I1007 08:58:47.901804 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ltpnv" Oct 07 08:58:50 crc kubenswrapper[4875]: I1007 08:58:50.737986 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wrx6d_e032dc50-8da1-4b34-981a-ec2b162cace7/control-plane-machine-set-operator/0.log" Oct 07 08:58:50 crc kubenswrapper[4875]: I1007 08:58:50.883288 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zpc28_5679969e-90bf-49f0-b478-7312b6e13a05/kube-rbac-proxy/0.log" Oct 07 08:58:50 crc kubenswrapper[4875]: I1007 08:58:50.962939 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zpc28_5679969e-90bf-49f0-b478-7312b6e13a05/machine-api-operator/0.log" Oct 07 08:58:55 crc kubenswrapper[4875]: I1007 08:58:55.705521 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:58:55 crc kubenswrapper[4875]: E1007 08:58:55.706475 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:58:56 crc kubenswrapper[4875]: I1007 08:58:56.836619 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:56 crc kubenswrapper[4875]: I1007 08:58:56.888485 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fpwf5" Oct 07 08:58:56 crc kubenswrapper[4875]: I1007 08:58:56.965308 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fpwf5"] Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.086451 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.086698 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gfc5q" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="registry-server" containerID="cri-o://90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" gracePeriod=2 Oct 07 08:58:57 crc kubenswrapper[4875]: E1007 08:58:57.212513 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2 is running failed: container process not found" 
containerID="90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:58:57 crc kubenswrapper[4875]: E1007 08:58:57.213393 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2 is running failed: container process not found" containerID="90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:58:57 crc kubenswrapper[4875]: E1007 08:58:57.214055 4875 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2 is running failed: container process not found" containerID="90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 08:58:57 crc kubenswrapper[4875]: E1007 08:58:57.214087 4875 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-gfc5q" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="registry-server" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.678502 4875 generic.go:334] "Generic (PLEG): container finished" podID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerID="90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" exitCode=0 Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.679910 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerDied","Data":"90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2"} Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.797594 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.872131 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content\") pod \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.872224 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities\") pod \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.872296 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrvhg\" (UniqueName: \"kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg\") pod \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\" (UID: \"29a4e16e-a79d-4dd1-b06c-eeb70e66e974\") " Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.875600 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities" (OuterVolumeSpecName: "utilities") pod "29a4e16e-a79d-4dd1-b06c-eeb70e66e974" (UID: "29a4e16e-a79d-4dd1-b06c-eeb70e66e974"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.893055 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg" (OuterVolumeSpecName: "kube-api-access-rrvhg") pod "29a4e16e-a79d-4dd1-b06c-eeb70e66e974" (UID: "29a4e16e-a79d-4dd1-b06c-eeb70e66e974"). InnerVolumeSpecName "kube-api-access-rrvhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.974352 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.974388 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrvhg\" (UniqueName: \"kubernetes.io/projected/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-kube-api-access-rrvhg\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:57 crc kubenswrapper[4875]: I1007 08:58:57.984265 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29a4e16e-a79d-4dd1-b06c-eeb70e66e974" (UID: "29a4e16e-a79d-4dd1-b06c-eeb70e66e974"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.076746 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29a4e16e-a79d-4dd1-b06c-eeb70e66e974-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.694214 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gfc5q" Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.694538 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gfc5q" event={"ID":"29a4e16e-a79d-4dd1-b06c-eeb70e66e974","Type":"ContainerDied","Data":"d53987b1eb04258d6bca55d6243ba4b4254a72937be2e4cb1d3ae65d245353e1"} Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.694659 4875 scope.go:117] "RemoveContainer" containerID="90ac5f7aa1e33d1595680d27047c907bc96a484480ac1d725f0959b9d3c675a2" Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.735376 4875 scope.go:117] "RemoveContainer" containerID="96325bde7d8a78e41273569c3e77aff734d12e4df4bc0186e33b7fbd16dc9d53" Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.740501 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.749214 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gfc5q"] Oct 07 08:58:58 crc kubenswrapper[4875]: I1007 08:58:58.777496 4875 scope.go:117] "RemoveContainer" containerID="189510f1aa650b3f41bbeaa53f0b65205ef00e48092e11596b550e3df47fa505" Oct 07 08:58:59 crc kubenswrapper[4875]: I1007 08:58:59.711250 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" path="/var/lib/kubelet/pods/29a4e16e-a79d-4dd1-b06c-eeb70e66e974/volumes" Oct 07 08:59:04 crc kubenswrapper[4875]: I1007 08:59:04.326897 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4th7n_34bd548d-264b-4fca-8e6e-153a3309bc28/cert-manager-controller/0.log" Oct 07 08:59:04 crc kubenswrapper[4875]: I1007 08:59:04.455921 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-wgr7v_3d10ad9b-14eb-4da6-a7b9-8ca414305d45/cert-manager-cainjector/0.log" Oct 07 08:59:04 crc kubenswrapper[4875]: I1007 08:59:04.501455 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-rpz2k_3876b303-4b50-4e7f-afbc-fad7b7196009/cert-manager-webhook/0.log" Oct 07 08:59:08 crc kubenswrapper[4875]: I1007 08:59:08.699107 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:59:08 crc kubenswrapper[4875]: E1007 08:59:08.700141 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.208012 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-fmrrb_85fcbeae-845a-48a4-8aea-e21d5df0e1fd/nmstate-console-plugin/0.log" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.331149 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cjhsf_546057f3-6bcf-4fd9-9c8d-1d21f13a70f8/nmstate-handler/0.log" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.407341 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lrxvs_a15028de-2729-40bb-add3-8d042826a0e5/kube-rbac-proxy/0.log" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.407843 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lrxvs_a15028de-2729-40bb-add3-8d042826a0e5/nmstate-metrics/0.log" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.577355 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-t9gm5_53a570a0-5097-4c09-af4b-8bca758b17b6/nmstate-operator/0.log" Oct 07 08:59:15 crc kubenswrapper[4875]: I1007 08:59:15.629656 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sv4nj_9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f/nmstate-webhook/0.log" Oct 07 08:59:20 crc kubenswrapper[4875]: I1007 08:59:20.697410 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:59:20 crc kubenswrapper[4875]: E1007 08:59:20.697925 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.342572 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-jlztd_90069e5a-5263-4c20-9c21-caa665096b11/kube-rbac-proxy/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.410109 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-jlztd_90069e5a-5263-4c20-9c21-caa665096b11/controller/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.561102 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.719331 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.725784 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.728646 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.764208 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.926290 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.926290 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.963018 4875 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 08:59:29 crc kubenswrapper[4875]: I1007 08:59:29.986713 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.151708 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.175010 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/controller/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.199449 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.204745 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.408094 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/frr-metrics/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.408529 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/kube-rbac-proxy/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.439626 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/kube-rbac-proxy-frr/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.640302 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/reloader/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.642606 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-hw72k_2570bc7b-9601-4fe3-8fb7-da9277a529c7/frr-k8s-webhook-server/0.log" Oct 07 08:59:30 crc kubenswrapper[4875]: I1007 08:59:30.879669 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79ccb7884d-7cp64_497229d2-1650-4976-88c5-24f97a0afdb8/manager/0.log" Oct 07 08:59:31 crc kubenswrapper[4875]: I1007 08:59:31.098992 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-769577ff68-db67z_f34be03b-6e0c-40e3-99b9-3b1dbe22e40a/webhook-server/0.log" Oct 07 08:59:31 crc kubenswrapper[4875]: I1007 08:59:31.265391 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xf2p2_dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7/kube-rbac-proxy/0.log" Oct 07 08:59:31 crc kubenswrapper[4875]: I1007 08:59:31.891055 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xf2p2_dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7/speaker/0.log" Oct 07 08:59:32 crc kubenswrapper[4875]: I1007 08:59:32.026312 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/frr/0.log" Oct 07 08:59:35 crc kubenswrapper[4875]: I1007 08:59:35.707836 4875 scope.go:117] "RemoveContainer" 
containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:59:35 crc kubenswrapper[4875]: E1007 08:59:35.708384 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 08:59:43 crc kubenswrapper[4875]: I1007 08:59:43.319954 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 08:59:43 crc kubenswrapper[4875]: I1007 08:59:43.741422 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 08:59:43 crc kubenswrapper[4875]: I1007 08:59:43.786319 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 08:59:43 crc kubenswrapper[4875]: I1007 08:59:43.789485 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.010544 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.044555 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.049737 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/extract/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.199595 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.450871 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.454560 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.455771 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.788440 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 08:59:44 crc kubenswrapper[4875]: I1007 08:59:44.795055 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.078811 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.336332 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.379893 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.425642 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.479161 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/registry-server/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.583081 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.626373 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 08:59:45 crc kubenswrapper[4875]: I1007 08:59:45.834192 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.106543 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.108009 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.199336 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.374767 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.457353 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/extract/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.460972 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.542832 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/registry-server/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.641103 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kjd46_193bb790-d155-4aee-9f31-41b457c429ca/marketplace-operator/0.log" Oct 07 08:59:46 crc kubenswrapper[4875]: I1007 08:59:46.831321 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.004037 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.030380 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.038013 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.254627 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.257094 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.350510 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.419379 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/registry-server/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.578810 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.587395 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.592441 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.763270 4875 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.800696 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 08:59:47 crc kubenswrapper[4875]: I1007 08:59:47.925310 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/registry-server/0.log" Oct 07 08:59:49 crc kubenswrapper[4875]: I1007 08:59:49.698735 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 08:59:49 crc kubenswrapper[4875]: E1007 08:59:49.699756 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.149723 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb"] Oct 07 09:00:00 crc kubenswrapper[4875]: E1007 09:00:00.150706 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bbeaa56-147d-4e2c-b10d-68341c9e41ba" containerName="container-00" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.150721 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bbeaa56-147d-4e2c-b10d-68341c9e41ba" containerName="container-00" Oct 07 09:00:00 crc kubenswrapper[4875]: E1007 09:00:00.150734 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="extract-utilities" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.150740 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="extract-utilities" Oct 07 09:00:00 crc kubenswrapper[4875]: E1007 09:00:00.150757 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="extract-content" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.150763 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="extract-content" Oct 07 09:00:00 crc kubenswrapper[4875]: E1007 09:00:00.150824 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="registry-server" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.150836 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="registry-server" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.151126 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a4e16e-a79d-4dd1-b06c-eeb70e66e974" containerName="registry-server" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.151151 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bbeaa56-147d-4e2c-b10d-68341c9e41ba" containerName="container-00" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.152064 4875 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.154583 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.155987 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.166792 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb"] Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.210460 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.210863 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfcmf\" (UniqueName: \"kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.210916 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.312612 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.312738 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfcmf\" (UniqueName: \"kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.312809 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.313949 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.323828 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.340054 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfcmf\" (UniqueName: \"kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf\") pod \"collect-profiles-29330460-fvxtb\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.495918 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:00 crc kubenswrapper[4875]: I1007 09:00:00.973353 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb"] Oct 07 09:00:01 crc kubenswrapper[4875]: I1007 09:00:01.336369 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" event={"ID":"7693dd97-b6d3-4922-9e35-ed1ec9659e0a","Type":"ContainerStarted","Data":"91e0da85f47ffd4613f092cf1d21856dd70be3cd7e13ebf1f2d271dc49903b96"} Oct 07 09:00:01 crc kubenswrapper[4875]: I1007 09:00:01.336456 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" event={"ID":"7693dd97-b6d3-4922-9e35-ed1ec9659e0a","Type":"ContainerStarted","Data":"46f17e9125444162b458dc43af9f48f494e549b93088342adbe5ce103dc83132"} Oct 07 09:00:02 crc kubenswrapper[4875]: I1007 09:00:02.350790 4875 generic.go:334] "Generic (PLEG): container finished" podID="7693dd97-b6d3-4922-9e35-ed1ec9659e0a" containerID="91e0da85f47ffd4613f092cf1d21856dd70be3cd7e13ebf1f2d271dc49903b96" exitCode=0 Oct 07 09:00:02 crc kubenswrapper[4875]: I1007 09:00:02.350869 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" event={"ID":"7693dd97-b6d3-4922-9e35-ed1ec9659e0a","Type":"ContainerDied","Data":"91e0da85f47ffd4613f092cf1d21856dd70be3cd7e13ebf1f2d271dc49903b96"} Oct 07 09:00:02 crc kubenswrapper[4875]: I1007 09:00:02.698614 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:00:02 crc kubenswrapper[4875]: E1007 09:00:02.699045 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.750557 4875 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.960083 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume\") pod \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.960236 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume\") pod \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.960352 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfcmf\" (UniqueName: \"kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf\") pod \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\" (UID: \"7693dd97-b6d3-4922-9e35-ed1ec9659e0a\") " Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.962567 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume" (OuterVolumeSpecName: "config-volume") pod "7693dd97-b6d3-4922-9e35-ed1ec9659e0a" (UID: "7693dd97-b6d3-4922-9e35-ed1ec9659e0a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.970423 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7693dd97-b6d3-4922-9e35-ed1ec9659e0a" (UID: "7693dd97-b6d3-4922-9e35-ed1ec9659e0a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 09:00:03 crc kubenswrapper[4875]: I1007 09:00:03.976133 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf" (OuterVolumeSpecName: "kube-api-access-cfcmf") pod "7693dd97-b6d3-4922-9e35-ed1ec9659e0a" (UID: "7693dd97-b6d3-4922-9e35-ed1ec9659e0a"). InnerVolumeSpecName "kube-api-access-cfcmf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.063273 4875 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.063318 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfcmf\" (UniqueName: \"kubernetes.io/projected/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-kube-api-access-cfcmf\") on node \"crc\" DevicePath \"\"" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.063332 4875 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7693dd97-b6d3-4922-9e35-ed1ec9659e0a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.380990 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" event={"ID":"7693dd97-b6d3-4922-9e35-ed1ec9659e0a","Type":"ContainerDied","Data":"46f17e9125444162b458dc43af9f48f494e549b93088342adbe5ce103dc83132"} Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.381307 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46f17e9125444162b458dc43af9f48f494e549b93088342adbe5ce103dc83132" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.381088 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29330460-fvxtb" Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.465338 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx"] Oct 07 09:00:04 crc kubenswrapper[4875]: I1007 09:00:04.472227 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29330415-pdssx"] Oct 07 09:00:05 crc kubenswrapper[4875]: I1007 09:00:05.710023 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4efbc99-08f2-418f-a6e0-d7b17f655810" path="/var/lib/kubelet/pods/d4efbc99-08f2-418f-a6e0-d7b17f655810/volumes" Oct 07 09:00:15 crc kubenswrapper[4875]: I1007 09:00:15.706282 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:00:15 crc kubenswrapper[4875]: E1007 09:00:15.707171 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:00:21 crc kubenswrapper[4875]: I1007 09:00:21.291662 4875 scope.go:117] "RemoveContainer" containerID="fc184331fd467ad772a2a8313e1c0c81b3662f1178cef81c72e8eb6c46b7993e" Oct 07 09:00:29 crc kubenswrapper[4875]: I1007 09:00:29.698346 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:00:29 crc kubenswrapper[4875]: E1007 09:00:29.699031 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:00:43 crc kubenswrapper[4875]: I1007 09:00:43.697684 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:00:43 crc kubenswrapper[4875]: E1007 09:00:43.698553 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:00:55 crc kubenswrapper[4875]: I1007 09:00:55.705840 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:00:55 crc kubenswrapper[4875]: E1007 09:00:55.706680 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.153079 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29330461-27nn2"] Oct 07 09:01:00 crc kubenswrapper[4875]: E1007 09:01:00.154072 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7693dd97-b6d3-4922-9e35-ed1ec9659e0a" containerName="collect-profiles" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.154090 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="7693dd97-b6d3-4922-9e35-ed1ec9659e0a" containerName="collect-profiles" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.154270 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="7693dd97-b6d3-4922-9e35-ed1ec9659e0a" containerName="collect-profiles" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.154986 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.168312 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29330461-27nn2"] Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.247530 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p52pg\" (UniqueName: \"kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.247631 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.247684 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.247712 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.349717 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p52pg\" (UniqueName: \"kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.349822 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.349946 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.349984 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.357450 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.357824 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.366757 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.367281 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p52pg\" (UniqueName: \"kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg\") pod \"keystone-cron-29330461-27nn2\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.493200 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:00 crc kubenswrapper[4875]: I1007 09:01:00.994275 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29330461-27nn2"] Oct 07 09:01:01 crc kubenswrapper[4875]: I1007 09:01:01.985914 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330461-27nn2" event={"ID":"89f08dcf-ce53-43d8-a1be-fcf0c50358f4","Type":"ContainerStarted","Data":"5cb06d109f538740b4bad62472a7a2c38657ec7e33d1d26dd95d5418bc5dc36e"} Oct 07 09:01:01 crc kubenswrapper[4875]: I1007 09:01:01.986380 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330461-27nn2" event={"ID":"89f08dcf-ce53-43d8-a1be-fcf0c50358f4","Type":"ContainerStarted","Data":"584b58d603347a097a7ea56d9cb1111859519a22c03c053548ae31ee1ac285cb"} Oct 07 09:01:04 crc kubenswrapper[4875]: I1007 09:01:04.007641 4875 generic.go:334] "Generic (PLEG): container finished" podID="89f08dcf-ce53-43d8-a1be-fcf0c50358f4" containerID="5cb06d109f538740b4bad62472a7a2c38657ec7e33d1d26dd95d5418bc5dc36e" exitCode=0 Oct 07 09:01:04 crc kubenswrapper[4875]: I1007 09:01:04.007725 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330461-27nn2" event={"ID":"89f08dcf-ce53-43d8-a1be-fcf0c50358f4","Type":"ContainerDied","Data":"5cb06d109f538740b4bad62472a7a2c38657ec7e33d1d26dd95d5418bc5dc36e"} Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.365779 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.462286 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle\") pod \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.462405 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys\") pod \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.462500 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p52pg\" (UniqueName: \"kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg\") pod \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.462546 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data\") pod \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\" (UID: \"89f08dcf-ce53-43d8-a1be-fcf0c50358f4\") " Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.491476 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg" (OuterVolumeSpecName: "kube-api-access-p52pg") pod "89f08dcf-ce53-43d8-a1be-fcf0c50358f4" (UID: "89f08dcf-ce53-43d8-a1be-fcf0c50358f4"). InnerVolumeSpecName "kube-api-access-p52pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.491641 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "89f08dcf-ce53-43d8-a1be-fcf0c50358f4" (UID: "89f08dcf-ce53-43d8-a1be-fcf0c50358f4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.519321 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89f08dcf-ce53-43d8-a1be-fcf0c50358f4" (UID: "89f08dcf-ce53-43d8-a1be-fcf0c50358f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.545792 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data" (OuterVolumeSpecName: "config-data") pod "89f08dcf-ce53-43d8-a1be-fcf0c50358f4" (UID: "89f08dcf-ce53-43d8-a1be-fcf0c50358f4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.564862 4875 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.564949 4875 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.564965 4875 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 09:01:05 crc kubenswrapper[4875]: I1007 09:01:05.564977 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p52pg\" (UniqueName: \"kubernetes.io/projected/89f08dcf-ce53-43d8-a1be-fcf0c50358f4-kube-api-access-p52pg\") on node \"crc\" DevicePath \"\"" Oct 07 09:01:06 crc kubenswrapper[4875]: I1007 09:01:06.027022 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29330461-27nn2" event={"ID":"89f08dcf-ce53-43d8-a1be-fcf0c50358f4","Type":"ContainerDied","Data":"584b58d603347a097a7ea56d9cb1111859519a22c03c053548ae31ee1ac285cb"} Oct 07 09:01:06 crc kubenswrapper[4875]: I1007 09:01:06.027077 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="584b58d603347a097a7ea56d9cb1111859519a22c03c053548ae31ee1ac285cb" Oct 07 09:01:06 crc kubenswrapper[4875]: I1007 09:01:06.027142 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29330461-27nn2" Oct 07 09:01:09 crc kubenswrapper[4875]: I1007 09:01:09.700213 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:01:10 crc kubenswrapper[4875]: I1007 09:01:10.070592 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f"} Oct 07 09:01:53 crc kubenswrapper[4875]: I1007 09:01:53.484344 4875 generic.go:334] "Generic (PLEG): container finished" podID="3dead823-f04a-4f8b-9221-f21b144333f5" containerID="1d51c8c98d518d138d78e8d823a3b3c2c3e657324adc2405194914156a646b3b" exitCode=0 Oct 07 09:01:53 crc kubenswrapper[4875]: I1007 09:01:53.484418 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbnxf/must-gather-hvc46" event={"ID":"3dead823-f04a-4f8b-9221-f21b144333f5","Type":"ContainerDied","Data":"1d51c8c98d518d138d78e8d823a3b3c2c3e657324adc2405194914156a646b3b"} Oct 07 09:01:53 crc kubenswrapper[4875]: I1007 09:01:53.485316 4875 scope.go:117] "RemoveContainer" containerID="1d51c8c98d518d138d78e8d823a3b3c2c3e657324adc2405194914156a646b3b" Oct 07 09:01:54 crc kubenswrapper[4875]: I1007 09:01:54.490146 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbnxf_must-gather-hvc46_3dead823-f04a-4f8b-9221-f21b144333f5/gather/0.log" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.209242 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbnxf/must-gather-hvc46"] Oct 07 09:02:03 crc kubenswrapper[4875]: 
I1007 09:02:03.210167 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wbnxf/must-gather-hvc46" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="copy" containerID="cri-o://7381e53bfa580a70403f0b8218d5936b3839c57c7c7eb4b63bc807f7e3da4a81" gracePeriod=2 Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.232543 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbnxf/must-gather-hvc46"] Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.592535 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbnxf_must-gather-hvc46_3dead823-f04a-4f8b-9221-f21b144333f5/copy/0.log" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.594183 4875 generic.go:334] "Generic (PLEG): container finished" podID="3dead823-f04a-4f8b-9221-f21b144333f5" containerID="7381e53bfa580a70403f0b8218d5936b3839c57c7c7eb4b63bc807f7e3da4a81" exitCode=143 Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.750743 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbnxf_must-gather-hvc46_3dead823-f04a-4f8b-9221-f21b144333f5/copy/0.log" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.756314 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.809856 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output\") pod \"3dead823-f04a-4f8b-9221-f21b144333f5\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.810081 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8pqw\" (UniqueName: \"kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw\") pod \"3dead823-f04a-4f8b-9221-f21b144333f5\" (UID: \"3dead823-f04a-4f8b-9221-f21b144333f5\") " Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.819334 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw" (OuterVolumeSpecName: "kube-api-access-w8pqw") pod "3dead823-f04a-4f8b-9221-f21b144333f5" (UID: "3dead823-f04a-4f8b-9221-f21b144333f5"). InnerVolumeSpecName "kube-api-access-w8pqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.912235 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8pqw\" (UniqueName: \"kubernetes.io/projected/3dead823-f04a-4f8b-9221-f21b144333f5-kube-api-access-w8pqw\") on node \"crc\" DevicePath \"\"" Oct 07 09:02:03 crc kubenswrapper[4875]: I1007 09:02:03.974364 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3dead823-f04a-4f8b-9221-f21b144333f5" (UID: "3dead823-f04a-4f8b-9221-f21b144333f5"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:02:04 crc kubenswrapper[4875]: I1007 09:02:04.015703 4875 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3dead823-f04a-4f8b-9221-f21b144333f5-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 07 09:02:04 crc kubenswrapper[4875]: I1007 09:02:04.608060 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbnxf_must-gather-hvc46_3dead823-f04a-4f8b-9221-f21b144333f5/copy/0.log" Oct 07 09:02:04 crc kubenswrapper[4875]: I1007 09:02:04.608859 4875 scope.go:117] "RemoveContainer" containerID="7381e53bfa580a70403f0b8218d5936b3839c57c7c7eb4b63bc807f7e3da4a81" Oct 07 09:02:04 crc kubenswrapper[4875]: I1007 09:02:04.608940 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbnxf/must-gather-hvc46" Oct 07 09:02:04 crc kubenswrapper[4875]: I1007 09:02:04.637275 4875 scope.go:117] "RemoveContainer" containerID="1d51c8c98d518d138d78e8d823a3b3c2c3e657324adc2405194914156a646b3b" Oct 07 09:02:05 crc kubenswrapper[4875]: I1007 09:02:05.709451 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" path="/var/lib/kubelet/pods/3dead823-f04a-4f8b-9221-f21b144333f5/volumes" Oct 07 09:02:21 crc kubenswrapper[4875]: I1007 09:02:21.391290 4875 scope.go:117] "RemoveContainer" containerID="52e68739b9a130474c58bc2b3a92eae497adb1bda65852cc0d2fec0e0403ac28" Oct 07 09:02:21 crc kubenswrapper[4875]: I1007 09:02:21.435352 4875 scope.go:117] "RemoveContainer" containerID="670dc9ac1060964b28c80a31fa31ed70e17c880d6e0973794a8e22cf65d9f04a" Oct 07 09:02:21 crc kubenswrapper[4875]: I1007 09:02:21.467266 4875 scope.go:117] "RemoveContainer" containerID="bb93a8191cb0b82d9cb1a1e926eaa0b23ea3347168bda4f60d42ad89d410acdf" Oct 07 09:02:21 crc kubenswrapper[4875]: I1007 09:02:21.488306 4875 scope.go:117] "RemoveContainer" containerID="a265075e9c0a6514402907c44edde88abcdb0b8d33178422fdd293e9c0726ade" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.816779 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xfnzz/must-gather-ff6lw"] Oct 07 09:02:30 crc kubenswrapper[4875]: E1007 09:02:30.817849 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="copy" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.817868 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="copy" Oct 07 09:02:30 crc kubenswrapper[4875]: E1007 09:02:30.817907 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f08dcf-ce53-43d8-a1be-fcf0c50358f4" containerName="keystone-cron" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.817917 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f08dcf-ce53-43d8-a1be-fcf0c50358f4" containerName="keystone-cron" Oct 07 09:02:30 crc kubenswrapper[4875]: E1007 09:02:30.817963 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="gather" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.817972 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="gather" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.818208 4875 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="copy" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.818237 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f08dcf-ce53-43d8-a1be-fcf0c50358f4" containerName="keystone-cron" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.818258 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dead823-f04a-4f8b-9221-f21b144333f5" containerName="gather" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.819360 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.823896 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xfnzz"/"openshift-service-ca.crt" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.824012 4875 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xfnzz"/"default-dockercfg-rqfh4" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.826334 4875 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xfnzz"/"kube-root-ca.crt" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.852053 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xfnzz/must-gather-ff6lw"] Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.948042 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:30 crc kubenswrapper[4875]: I1007 09:02:30.948580 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzczh\" (UniqueName: \"kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.050634 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzczh\" (UniqueName: \"kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.050723 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.051214 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.069562 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzczh\" 
(UniqueName: \"kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh\") pod \"must-gather-ff6lw\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.142546 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.662072 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xfnzz/must-gather-ff6lw"] Oct 07 09:02:31 crc kubenswrapper[4875]: I1007 09:02:31.860861 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" event={"ID":"5ba0d4cd-d615-4db2-86d6-849496631712","Type":"ContainerStarted","Data":"77d6e238ea9196d78aa6c834664dc65ae979c76dba0ef62118d8fb903b2f1ba2"} Oct 07 09:02:32 crc kubenswrapper[4875]: I1007 09:02:32.873676 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" event={"ID":"5ba0d4cd-d615-4db2-86d6-849496631712","Type":"ContainerStarted","Data":"9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a"} Oct 07 09:02:32 crc kubenswrapper[4875]: I1007 09:02:32.874053 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" event={"ID":"5ba0d4cd-d615-4db2-86d6-849496631712","Type":"ContainerStarted","Data":"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364"} Oct 07 09:02:32 crc kubenswrapper[4875]: I1007 09:02:32.889753 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" podStartSLOduration=2.88973541 podStartE2EDuration="2.88973541s" podCreationTimestamp="2025-10-07 09:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 09:02:32.887639472 +0000 UTC m=+3977.847410025" watchObservedRunningTime="2025-10-07 09:02:32.88973541 +0000 UTC m=+3977.849505953" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.107732 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4r4h2"] Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.109662 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.285662 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.286118 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf2qw\" (UniqueName: \"kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.388041 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.388166 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf2qw\" (UniqueName: \"kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.388225 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.413953 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf2qw\" (UniqueName: \"kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw\") pod \"crc-debug-4r4h2\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: I1007 09:02:36.430536 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:02:36 crc kubenswrapper[4875]: W1007 09:02:36.466078 4875 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded68affe_c973_4e6b_b8c3_6288abe2c132.slice/crio-00eb0ac2b27d2d7ae3b2dd4b47a6253e14a2c549a3435b15774785c1446e6947 WatchSource:0}: Error finding container 00eb0ac2b27d2d7ae3b2dd4b47a6253e14a2c549a3435b15774785c1446e6947: Status 404 returned error can't find the container with id 00eb0ac2b27d2d7ae3b2dd4b47a6253e14a2c549a3435b15774785c1446e6947 Oct 07 09:02:37 crc kubenswrapper[4875]: I1007 09:02:37.018661 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" event={"ID":"ed68affe-c973-4e6b-b8c3-6288abe2c132","Type":"ContainerStarted","Data":"0f12d4c8722e5162f94a58d0770d2eca5e8324a62f697816be95e4e9f68ae071"} Oct 07 09:02:37 crc kubenswrapper[4875]: I1007 09:02:37.019377 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" event={"ID":"ed68affe-c973-4e6b-b8c3-6288abe2c132","Type":"ContainerStarted","Data":"00eb0ac2b27d2d7ae3b2dd4b47a6253e14a2c549a3435b15774785c1446e6947"} Oct 07 09:02:37 crc kubenswrapper[4875]: I1007 09:02:37.044436 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" podStartSLOduration=1.0444163419999999 podStartE2EDuration="1.044416342s" podCreationTimestamp="2025-10-07 09:02:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 09:02:37.036755186 +0000 UTC m=+3981.996525739" watchObservedRunningTime="2025-10-07 09:02:37.044416342 +0000 UTC m=+3982.004186875" Oct 07 09:03:31 crc kubenswrapper[4875]: I1007 09:03:31.221334 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:03:31 crc kubenswrapper[4875]: I1007 09:03:31.221922 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:03:45 crc kubenswrapper[4875]: I1007 09:03:45.726138 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d454b8786-74tns_127bbfa5-9171-435f-99d2-069db85c4d67/barbican-api/0.log" Oct 07 09:03:45 crc kubenswrapper[4875]: I1007 09:03:45.733183 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d454b8786-74tns_127bbfa5-9171-435f-99d2-069db85c4d67/barbican-api-log/0.log" Oct 07 09:03:45 crc kubenswrapper[4875]: I1007 09:03:45.905617 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbb8f897d-wgrt2_5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e/barbican-keystone-listener/0.log" Oct 07 09:03:45 crc kubenswrapper[4875]: I1007 09:03:45.969953 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbb8f897d-wgrt2_5c001ba7-ccf7-40ea-85b4-4ba3c981ac0e/barbican-keystone-listener-log/0.log" Oct 07 
09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.106754 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-576fbf56f5-4mtlh_3021a32f-0a9f-4dea-8da2-2ae1df754ccc/barbican-worker/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.209487 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-576fbf56f5-4mtlh_3021a32f-0a9f-4dea-8da2-2ae1df754ccc/barbican-worker-log/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.326604 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-wd6f4_15ba8b2a-ed31-47c5-b655-efb44ceb0134/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.522451 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/ceilometer-notification-agent/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.583004 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/ceilometer-central-agent/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.632806 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/proxy-httpd/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.708861 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_453d5031-7d52-48b9-abd5-5c261297ee70/sg-core/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.884195 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3a93ed97-4ce8-45f0-b81c-52e3613ea189/cinder-api/0.log" Oct 07 09:03:46 crc kubenswrapper[4875]: I1007 09:03:46.935905 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_3a93ed97-4ce8-45f0-b81c-52e3613ea189/cinder-api-log/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.110272 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_63b2e079-16c2-4f61-8ea4-a0fd50150f03/cinder-scheduler/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.177664 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_63b2e079-16c2-4f61-8ea4-a0fd50150f03/probe/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.284216 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jxf7h_7af841b2-2a3c-4cea-a5b1-5f854609190b/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.503939 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-7pmfq_965bc704-2251-46a2-b947-05d835da9ea9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.583553 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8tqzx_063ffc6e-9bc6-41ea-9d6d-d73e3923c92a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.726158 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/init/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 
09:03:47.931955 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/dnsmasq-dns/0.log" Oct 07 09:03:47 crc kubenswrapper[4875]: I1007 09:03:47.971833 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-5gt8q_f505e712-23fa-4ef3-b464-591427bea934/init/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.032704 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-f424w_7b638466-3fdb-4290-8a73-9f3d018a8ee0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.168288 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9d4dbc12-0c00-4b2a-ad57-055d19cebf0a/glance-httpd/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.189627 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9d4dbc12-0c00-4b2a-ad57-055d19cebf0a/glance-log/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.331209 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_95d6a4f6-2e39-4b75-aadf-ca829e1e9911/glance-httpd/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.365843 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_95d6a4f6-2e39-4b75-aadf-ca829e1e9911/glance-log/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.514625 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64854f4c8-d67s8_cfb37cba-9925-4808-9b9f-6dfd2550c15e/horizon/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.736164 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-t8wql_f7030085-e862-4c57-9c9e-29e88006533e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.873817 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-jwr5d_489def06-7200-4a3d-9d81-a811bac28712/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:48 crc kubenswrapper[4875]: I1007 09:03:48.927467 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64854f4c8-d67s8_cfb37cba-9925-4808-9b9f-6dfd2550c15e/horizon-log/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.143297 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29330461-27nn2_89f08dcf-ce53-43d8-a1be-fcf0c50358f4/keystone-cron/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.183115 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-754566d8d4-jzmbw_bcdbebc0-2911-4723-9a61-718037d0d1dc/keystone-api/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.306209 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_cd2084f6-1596-45c2-a4ba-1cbd7a1ca565/kube-state-metrics/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.415051 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-69n9w_3ec6c99f-4455-40ea-8a27-bf56298f3e17/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: 
I1007 09:03:49.737637 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f5f877689-scxcf_a42b578e-6f65-40c9-94f4-2b843c305470/neutron-httpd/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.796211 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f5f877689-scxcf_a42b578e-6f65-40c9-94f4-2b843c305470/neutron-api/0.log" Oct 07 09:03:49 crc kubenswrapper[4875]: I1007 09:03:49.997993 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wh5wz_c6df5d2d-85d8-4d79-b8a2-f3b6f7060019/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:50 crc kubenswrapper[4875]: I1007 09:03:50.538362 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3774daf2-c509-494e-81f3-9cadf5a30459/nova-api-log/0.log" Oct 07 09:03:50 crc kubenswrapper[4875]: I1007 09:03:50.819775 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_98872838-7b40-4854-97c4-edddb9a931a5/nova-cell0-conductor-conductor/0.log" Oct 07 09:03:50 crc kubenswrapper[4875]: I1007 09:03:50.967210 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3774daf2-c509-494e-81f3-9cadf5a30459/nova-api-api/0.log" Oct 07 09:03:51 crc kubenswrapper[4875]: I1007 09:03:51.105027 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_eb928cc7-3310-4d3e-929c-d470f8a8f8bb/nova-cell1-conductor-conductor/0.log" Oct 07 09:03:51 crc kubenswrapper[4875]: I1007 09:03:51.315017 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_e35464b6-e9ab-4262-a99d-efad04dbd0e0/nova-cell1-novncproxy-novncproxy/0.log" Oct 07 09:03:51 crc kubenswrapper[4875]: I1007 09:03:51.397819 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-kmx8n_1e54ad8c-e627-4e27-90d1-ea193eb2f42f/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:51 crc kubenswrapper[4875]: I1007 09:03:51.733762 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_47ec1bb6-1d05-4ed7-b706-6d25c4146e7d/nova-metadata-log/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.176134 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d49ca1ef-7891-416e-9d67-d17b4b031624/nova-scheduler-scheduler/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.270188 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/mysql-bootstrap/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.399474 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/mysql-bootstrap/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.511510 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8dad2a19-0fbf-46b6-b534-7f2712b644d7/galera/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.750693 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/mysql-bootstrap/0.log" Oct 07 09:03:52 crc kubenswrapper[4875]: I1007 09:03:52.953982 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/mysql-bootstrap/0.log" Oct 07 09:03:53 crc kubenswrapper[4875]: I1007 09:03:53.011669 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_027e3c4c-1861-4933-9d30-636575099b5c/galera/0.log" Oct 07 09:03:53 crc kubenswrapper[4875]: I1007 09:03:53.049522 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_47ec1bb6-1d05-4ed7-b706-6d25c4146e7d/nova-metadata-metadata/0.log" Oct 07 09:03:53 crc kubenswrapper[4875]: I1007 09:03:53.219222 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c25e751b-f41b-4571-92e4-81d1b263ed48/openstackclient/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.103678 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-6rsbk_e984b1d0-f011-46a5-8339-966f44e3c603/openstack-network-exporter/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.268781 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-npwcn_97a62f1e-e3e0-4592-82a8-2524ba6df291/ovn-controller/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.302503 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server-init/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.494286 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server-init/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.530862 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovs-vswitchd/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.534915 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vwbf5_318e0040-83b4-4fa3-95ff-768b9eb422e7/ovsdb-server/0.log" Oct 07 09:03:54 crc kubenswrapper[4875]: I1007 09:03:54.825504 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-mrzjf_97901af5-a883-4d4f-acd8-9425772903a9/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.094947 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_84eec933-bbc6-4961-940a-1a26f31d2fd3/openstack-network-exporter/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.191819 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_84eec933-bbc6-4961-940a-1a26f31d2fd3/ovn-northd/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.639084 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e845f7e2-55f1-445d-8155-8a92bc2ee519/openstack-network-exporter/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.698786 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e845f7e2-55f1-445d-8155-8a92bc2ee519/ovsdbserver-nb/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.817483 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_021914db-05c8-4ae9-a24e-dee6bfabff00/openstack-network-exporter/0.log" Oct 07 09:03:55 crc kubenswrapper[4875]: I1007 09:03:55.918416 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_021914db-05c8-4ae9-a24e-dee6bfabff00/ovsdbserver-sb/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.101656 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-548b9747d4-bkwjt_ba464fba-e931-4f8a-be56-6b5456e1572d/placement-api/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.270702 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-548b9747d4-bkwjt_ba464fba-e931-4f8a-be56-6b5456e1572d/placement-log/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.333134 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/setup-container/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.571574 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/setup-container/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.585734 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8bff65c7-6542-4501-90d2-fedc97d9a9d7/rabbitmq/0.log" Oct 07 09:03:56 crc kubenswrapper[4875]: I1007 09:03:56.824034 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/setup-container/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.006631 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/setup-container/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.090728 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d60697-89d7-42a0-9457-efef02815764/rabbitmq/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.228333 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-r76h5_f21455d3-51d3-464a-acd5-d707dfa2ee70/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.376712 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-mztgk_19285527-95a7-43c0-9366-3d8895c09835/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.566366 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-grkp4_23730b76-30ac-47bf-8043-3c713a209e1e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.703075 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-zlwhl_3910d9af-0583-40ef-887d-e73ddf795725/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:57 crc kubenswrapper[4875]: I1007 09:03:57.870490 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-prbmz_dfa58f34-fade-4504-8329-a9b13eb13726/ssh-known-hosts-edpm-deployment/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.098343 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-88cf5ccd5-wf5cp_74566d25-db70-4528-b9c4-89b32863c2eb/proxy-server/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.236813 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-88cf5ccd5-wf5cp_74566d25-db70-4528-b9c4-89b32863c2eb/proxy-httpd/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.303617 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-7bh99_e4ed1841-ffa8-4d3b-8a66-43221118d007/swift-ring-rebalance/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.474031 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-auditor/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.501108 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-reaper/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.737564 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-auditor/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.760615 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-server/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.768153 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/account-replicator/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.969699 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-replicator/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.977192 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-updater/0.log" Oct 07 09:03:58 crc kubenswrapper[4875]: I1007 09:03:58.997097 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/container-server/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.202762 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-replicator/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.212657 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-auditor/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.261609 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-expirer/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.382031 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-server/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.420852 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/object-updater/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.476506 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/rsync/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.580006 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_97048952-fbe0-46f2-8163-00ec9381508b/swift-recon-cron/0.log" Oct 07 09:03:59 
crc kubenswrapper[4875]: I1007 09:03:59.732351 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-bm42x_b06f38f5-d4e4-4de8-aab3-f171fc82d880/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.899765 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_b9d21853-761a-4786-baa2-e0e00011a9d5/tempest-tests-tempest-tests-runner/0.log" Oct 07 09:03:59 crc kubenswrapper[4875]: I1007 09:03:59.990065 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f90c30e8-e60e-415e-b547-e02a254b8f24/test-operator-logs-container/0.log" Oct 07 09:04:00 crc kubenswrapper[4875]: I1007 09:04:00.180832 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-lzlw5_7182b81e-0002-4025-ab76-31844db2d768/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 09:04:01 crc kubenswrapper[4875]: I1007 09:04:01.220596 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:04:01 crc kubenswrapper[4875]: I1007 09:04:01.221200 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:04:07 crc kubenswrapper[4875]: I1007 09:04:07.759538 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_4cc2ad0d-e745-4a93-a5fb-d9f891bc3474/memcached/0.log" Oct 07 09:04:21 crc kubenswrapper[4875]: I1007 09:04:21.615551 4875 scope.go:117] "RemoveContainer" containerID="50b86ce614a1e876314892fc17942263e38b116263704dd9452e53d3b78caf35" Oct 07 09:04:30 crc kubenswrapper[4875]: I1007 09:04:30.056076 4875 generic.go:334] "Generic (PLEG): container finished" podID="ed68affe-c973-4e6b-b8c3-6288abe2c132" containerID="0f12d4c8722e5162f94a58d0770d2eca5e8324a62f697816be95e4e9f68ae071" exitCode=0 Oct 07 09:04:30 crc kubenswrapper[4875]: I1007 09:04:30.056170 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" event={"ID":"ed68affe-c973-4e6b-b8c3-6288abe2c132","Type":"ContainerDied","Data":"0f12d4c8722e5162f94a58d0770d2eca5e8324a62f697816be95e4e9f68ae071"} Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.163187 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.198227 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4r4h2"] Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.205899 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4r4h2"] Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.206431 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf2qw\" (UniqueName: \"kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw\") pod \"ed68affe-c973-4e6b-b8c3-6288abe2c132\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.206634 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host\") pod \"ed68affe-c973-4e6b-b8c3-6288abe2c132\" (UID: \"ed68affe-c973-4e6b-b8c3-6288abe2c132\") " Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.206740 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host" (OuterVolumeSpecName: "host") pod "ed68affe-c973-4e6b-b8c3-6288abe2c132" (UID: "ed68affe-c973-4e6b-b8c3-6288abe2c132"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.207122 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed68affe-c973-4e6b-b8c3-6288abe2c132-host\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.213116 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw" (OuterVolumeSpecName: "kube-api-access-tf2qw") pod "ed68affe-c973-4e6b-b8c3-6288abe2c132" (UID: "ed68affe-c973-4e6b-b8c3-6288abe2c132"). InnerVolumeSpecName "kube-api-access-tf2qw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.221144 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.221218 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.221285 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.222240 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.222313 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" containerID="cri-o://88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f" gracePeriod=600 Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.309386 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf2qw\" (UniqueName: \"kubernetes.io/projected/ed68affe-c973-4e6b-b8c3-6288abe2c132-kube-api-access-tf2qw\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:31 crc kubenswrapper[4875]: I1007 09:04:31.709046 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed68affe-c973-4e6b-b8c3-6288abe2c132" path="/var/lib/kubelet/pods/ed68affe-c973-4e6b-b8c3-6288abe2c132/volumes" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.073588 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4r4h2" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.074612 4875 scope.go:117] "RemoveContainer" containerID="0f12d4c8722e5162f94a58d0770d2eca5e8324a62f697816be95e4e9f68ae071" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.077447 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f" exitCode=0 Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.077491 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f"} Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.077544 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerStarted","Data":"21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9"} Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.100005 4875 scope.go:117] "RemoveContainer" containerID="df6fd2d1c37696e37bee974a5fbbdc769e47de499d9e7a0feb044da0ecedbca3" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.351936 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4rhz9"] Oct 07 09:04:32 crc kubenswrapper[4875]: E1007 09:04:32.352701 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed68affe-c973-4e6b-b8c3-6288abe2c132" containerName="container-00" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.352720 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed68affe-c973-4e6b-b8c3-6288abe2c132" containerName="container-00" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.352999 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed68affe-c973-4e6b-b8c3-6288abe2c132" containerName="container-00" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.353632 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.430839 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8vjd\" (UniqueName: \"kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.430914 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.532443 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8vjd\" (UniqueName: \"kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.532553 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.532671 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.830544 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8vjd\" (UniqueName: \"kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd\") pod \"crc-debug-4rhz9\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:32 crc kubenswrapper[4875]: I1007 09:04:32.969903 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:33 crc kubenswrapper[4875]: I1007 09:04:33.100647 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" event={"ID":"8eae5884-80be-495e-9f42-d3793f2b9fa1","Type":"ContainerStarted","Data":"561870a92e432f63bbc700ae1cb720b5fa64837137017692f4ed3cf40fcfdf89"} Oct 07 09:04:34 crc kubenswrapper[4875]: I1007 09:04:34.113113 4875 generic.go:334] "Generic (PLEG): container finished" podID="8eae5884-80be-495e-9f42-d3793f2b9fa1" containerID="bef61ff161ab57de2f7d690e7fa5975fe777348b46afe7309cd2e548ed0450bb" exitCode=0 Oct 07 09:04:34 crc kubenswrapper[4875]: I1007 09:04:34.113401 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" event={"ID":"8eae5884-80be-495e-9f42-d3793f2b9fa1","Type":"ContainerDied","Data":"bef61ff161ab57de2f7d690e7fa5975fe777348b46afe7309cd2e548ed0450bb"} Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.220154 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.277434 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8vjd\" (UniqueName: \"kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd\") pod \"8eae5884-80be-495e-9f42-d3793f2b9fa1\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.277567 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host\") pod \"8eae5884-80be-495e-9f42-d3793f2b9fa1\" (UID: \"8eae5884-80be-495e-9f42-d3793f2b9fa1\") " Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.277931 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host" (OuterVolumeSpecName: "host") pod "8eae5884-80be-495e-9f42-d3793f2b9fa1" (UID: "8eae5884-80be-495e-9f42-d3793f2b9fa1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.278045 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8eae5884-80be-495e-9f42-d3793f2b9fa1-host\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.283076 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd" (OuterVolumeSpecName: "kube-api-access-z8vjd") pod "8eae5884-80be-495e-9f42-d3793f2b9fa1" (UID: "8eae5884-80be-495e-9f42-d3793f2b9fa1"). InnerVolumeSpecName "kube-api-access-z8vjd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:04:35 crc kubenswrapper[4875]: I1007 09:04:35.379364 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8vjd\" (UniqueName: \"kubernetes.io/projected/8eae5884-80be-495e-9f42-d3793f2b9fa1-kube-api-access-z8vjd\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:36 crc kubenswrapper[4875]: I1007 09:04:36.133219 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" event={"ID":"8eae5884-80be-495e-9f42-d3793f2b9fa1","Type":"ContainerDied","Data":"561870a92e432f63bbc700ae1cb720b5fa64837137017692f4ed3cf40fcfdf89"} Oct 07 09:04:36 crc kubenswrapper[4875]: I1007 09:04:36.133550 4875 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="561870a92e432f63bbc700ae1cb720b5fa64837137017692f4ed3cf40fcfdf89" Oct 07 09:04:36 crc kubenswrapper[4875]: I1007 09:04:36.133433 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-4rhz9" Oct 07 09:04:40 crc kubenswrapper[4875]: I1007 09:04:40.946064 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4rhz9"] Oct 07 09:04:40 crc kubenswrapper[4875]: I1007 09:04:40.954404 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-4rhz9"] Oct 07 09:04:41 crc kubenswrapper[4875]: I1007 09:04:41.708560 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eae5884-80be-495e-9f42-d3793f2b9fa1" path="/var/lib/kubelet/pods/8eae5884-80be-495e-9f42-d3793f2b9fa1/volumes" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.102431 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-mkzlg"] Oct 07 09:04:42 crc kubenswrapper[4875]: E1007 09:04:42.102925 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eae5884-80be-495e-9f42-d3793f2b9fa1" containerName="container-00" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.102943 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eae5884-80be-495e-9f42-d3793f2b9fa1" containerName="container-00" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.103219 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eae5884-80be-495e-9f42-d3793f2b9fa1" containerName="container-00" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.103974 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.182063 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5gqm\" (UniqueName: \"kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.182256 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.283740 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.283868 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5gqm\" (UniqueName: \"kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.283923 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.305661 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5gqm\" (UniqueName: \"kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm\") pod \"crc-debug-mkzlg\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:42 crc kubenswrapper[4875]: I1007 09:04:42.424306 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:43 crc kubenswrapper[4875]: I1007 09:04:43.193361 4875 generic.go:334] "Generic (PLEG): container finished" podID="ada48417-d933-4feb-a9e6-f4574843bf1f" containerID="079213f98e86b06c8c7e0b198830800c831a02c27abf581d63ef4bf332c92031" exitCode=0 Oct 07 09:04:43 crc kubenswrapper[4875]: I1007 09:04:43.193455 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" event={"ID":"ada48417-d933-4feb-a9e6-f4574843bf1f","Type":"ContainerDied","Data":"079213f98e86b06c8c7e0b198830800c831a02c27abf581d63ef4bf332c92031"} Oct 07 09:04:43 crc kubenswrapper[4875]: I1007 09:04:43.193706 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" event={"ID":"ada48417-d933-4feb-a9e6-f4574843bf1f","Type":"ContainerStarted","Data":"8eb17a35c0f50f9a35ba46a97b009872f45238100163815983c13b1afff81f16"} Oct 07 09:04:43 crc kubenswrapper[4875]: I1007 09:04:43.234831 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-mkzlg"] Oct 07 09:04:43 crc kubenswrapper[4875]: I1007 09:04:43.244665 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xfnzz/crc-debug-mkzlg"] Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.324574 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.424632 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5gqm\" (UniqueName: \"kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm\") pod \"ada48417-d933-4feb-a9e6-f4574843bf1f\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.424911 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host\") pod \"ada48417-d933-4feb-a9e6-f4574843bf1f\" (UID: \"ada48417-d933-4feb-a9e6-f4574843bf1f\") " Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.425522 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host" (OuterVolumeSpecName: "host") pod "ada48417-d933-4feb-a9e6-f4574843bf1f" (UID: "ada48417-d933-4feb-a9e6-f4574843bf1f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.433131 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm" (OuterVolumeSpecName: "kube-api-access-z5gqm") pod "ada48417-d933-4feb-a9e6-f4574843bf1f" (UID: "ada48417-d933-4feb-a9e6-f4574843bf1f"). InnerVolumeSpecName "kube-api-access-z5gqm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.527535 4875 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ada48417-d933-4feb-a9e6-f4574843bf1f-host\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.527585 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5gqm\" (UniqueName: \"kubernetes.io/projected/ada48417-d933-4feb-a9e6-f4574843bf1f-kube-api-access-z5gqm\") on node \"crc\" DevicePath \"\"" Oct 07 09:04:44 crc kubenswrapper[4875]: I1007 09:04:44.882521 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.038121 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.071600 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.080891 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.212239 4875 scope.go:117] "RemoveContainer" containerID="079213f98e86b06c8c7e0b198830800c831a02c27abf581d63ef4bf332c92031" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.212282 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/crc-debug-mkzlg" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.271847 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/pull/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.278579 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/extract/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.313777 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_90bf58ce7772111da4507f6aa87e868c7b0c73a47911b20bcd38811633gjmhr_5a54ab75-829f-4945-9c7e-c4566c15b8e2/util/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.438454 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-tslhs_cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5/kube-rbac-proxy/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.529665 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-58c4cd55f4-tslhs_cfd6de96-f3bd-4f0d-a076-f7c748b9b6a5/manager/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.549104 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-8chkp_ecbab85b-669d-4669-aa95-597dc630b7e6/kube-rbac-proxy/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.687040 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7d4d4f8d-8chkp_ecbab85b-669d-4669-aa95-597dc630b7e6/manager/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.753688 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ada48417-d933-4feb-a9e6-f4574843bf1f" path="/var/lib/kubelet/pods/ada48417-d933-4feb-a9e6-f4574843bf1f/volumes" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.756130 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-n9gbs_0ef97224-fe40-43a3-af95-f4e1986b8fbe/manager/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.798225 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-n9gbs_0ef97224-fe40-43a3-af95-f4e1986b8fbe/kube-rbac-proxy/0.log" Oct 07 09:04:45 crc kubenswrapper[4875]: I1007 09:04:45.943589 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-zpbdj_b9617ed2-0ac9-45b8-8089-1091ff8937dd/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.049979 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5dc44df7d5-zpbdj_b9617ed2-0ac9-45b8-8089-1091ff8937dd/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.152784 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-t97rc_0c4de86e-6c46-4292-8c13-faeff0997ac4/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.153013 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-54b4974c45-t97rc_0c4de86e-6c46-4292-8c13-faeff0997ac4/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.251304 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-g6cn6_52379554-390c-4cb2-97ae-0cb0596f36d1/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.373236 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-76d5b87f47-g6cn6_52379554-390c-4cb2-97ae-0cb0596f36d1/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.442388 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-l77jd_0cce3a76-3617-40be-8d2b-b8f9184e6b61/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.579681 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-6vp52_f03c0528-11ac-4b93-8f46-4415192ba694/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.652592 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-658588b8c9-l77jd_0cce3a76-3617-40be-8d2b-b8f9184e6b61/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.655762 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-649675d675-6vp52_f03c0528-11ac-4b93-8f46-4415192ba694/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.852046 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-lvqtd_ab712745-89ea-43bb-b2d7-7192d3691acf/kube-rbac-proxy/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.885384 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b5ccf6d9c-lvqtd_ab712745-89ea-43bb-b2d7-7192d3691acf/manager/0.log" Oct 07 09:04:46 crc kubenswrapper[4875]: I1007 09:04:46.982232 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-zwhfp_45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.008606 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-65d89cfd9f-zwhfp_45a1f3f1-6fbc-47d9-81c4-77e8ede46dd8/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.034253 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j_d9b65818-b46c-4a3d-8ed2-53d04e3dc834/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.206780 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6cd6d7bdf5-n4t9j_d9b65818-b46c-4a3d-8ed2-53d04e3dc834/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.234897 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-jcb62_04963c19-bc7b-41c9-8b48-e3b8653738a8/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.316365 4875 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-8d984cc4d-jcb62_04963c19-bc7b-41c9-8b48-e3b8653738a8/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.388533 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-2s4ks_7b5bb2f7-a322-4af4-81dc-d9b104b2bb85/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.513068 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7c7fc454ff-2s4ks_7b5bb2f7-a322-4af4-81dc-d9b104b2bb85/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.619305 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-wr55g_d848105d-5f25-435d-bc92-fc6f9eac9749/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.625906 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7468f855d8-wr55g_d848105d-5f25-435d-bc92-fc6f9eac9749/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.772016 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t_052c6076-1098-41ac-a80d-11879a2e08bc/kube-rbac-proxy/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.834035 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5dfbbd665cxk99t_052c6076-1098-41ac-a80d-11879a2e08bc/manager/0.log" Oct 07 09:04:47 crc kubenswrapper[4875]: I1007 09:04:47.948212 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667f8c4c67-cltql_1f9b6e86-8f57-4622-bc91-b062f04ec29f/kube-rbac-proxy/0.log" Oct 07 09:04:48 crc kubenswrapper[4875]: I1007 09:04:48.094499 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fb44c8bf6-4qlwf_4180c049-4ede-4df6-929d-4a3250404f38/kube-rbac-proxy/0.log" Oct 07 09:04:48 crc kubenswrapper[4875]: I1007 09:04:48.308492 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fb44c8bf6-4qlwf_4180c049-4ede-4df6-929d-4a3250404f38/operator/0.log" Oct 07 09:04:48 crc kubenswrapper[4875]: I1007 09:04:48.892848 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-dtxrv_f5fb651c-9cca-4f7c-9136-37534358a8dd/kube-rbac-proxy/0.log" Oct 07 09:04:48 crc kubenswrapper[4875]: I1007 09:04:48.909027 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-l2g5l_a7934996-8cb9-4f65-896e-c9755d8b5712/registry-server/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.106195 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667f8c4c67-cltql_1f9b6e86-8f57-4622-bc91-b062f04ec29f/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.142852 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-8b99x_9e09310b-4437-4e30-881f-ed2dd568aa16/kube-rbac-proxy/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: 
I1007 09:04:49.208346 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6d8b6f9b9-dtxrv_f5fb651c-9cca-4f7c-9136-37534358a8dd/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.221569 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-54689d9f88-8b99x_9e09310b-4437-4e30-881f-ed2dd568aa16/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.343697 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-rljtx_1b1c1a64-b0a5-4c2f-a43d-7cde7774094a/operator/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.416942 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-gfr8f_91189044-d565-4a5e-9766-1bd11f300f11/kube-rbac-proxy/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.417559 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-gfr8f_91189044-d565-4a5e-9766-1bd11f300f11/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.530714 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-z98x2_4aab0800-b79f-42f6-8d27-ce34e631f086/kube-rbac-proxy/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.601114 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-l8gnr_c57705e3-3b4e-4252-8c8d-0a21084ff5d8/kube-rbac-proxy/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.634278 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d4d74dd89-z98x2_4aab0800-b79f-42f6-8d27-ce34e631f086/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.721159 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-l8gnr_c57705e3-3b4e-4252-8c8d-0a21084ff5d8/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.804567 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-9njzw_d4352aef-6eec-4342-8b1f-67a0bf3459f2/manager/0.log" Oct 07 09:04:49 crc kubenswrapper[4875]: I1007 09:04:49.807354 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6cbc6dd547-9njzw_d4352aef-6eec-4342-8b1f-67a0bf3459f2/kube-rbac-proxy/0.log" Oct 07 09:05:04 crc kubenswrapper[4875]: I1007 09:05:04.246245 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wrx6d_e032dc50-8da1-4b34-981a-ec2b162cace7/control-plane-machine-set-operator/0.log" Oct 07 09:05:04 crc kubenswrapper[4875]: I1007 09:05:04.437171 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zpc28_5679969e-90bf-49f0-b478-7312b6e13a05/kube-rbac-proxy/0.log" Oct 07 09:05:04 crc kubenswrapper[4875]: I1007 09:05:04.456070 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zpc28_5679969e-90bf-49f0-b478-7312b6e13a05/machine-api-operator/0.log" Oct 07 09:05:15 crc 
kubenswrapper[4875]: I1007 09:05:15.899225 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4th7n_34bd548d-264b-4fca-8e6e-153a3309bc28/cert-manager-controller/0.log" Oct 07 09:05:16 crc kubenswrapper[4875]: I1007 09:05:16.079122 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-wgr7v_3d10ad9b-14eb-4da6-a7b9-8ca414305d45/cert-manager-cainjector/0.log" Oct 07 09:05:16 crc kubenswrapper[4875]: I1007 09:05:16.135577 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-rpz2k_3876b303-4b50-4e7f-afbc-fad7b7196009/cert-manager-webhook/0.log" Oct 07 09:05:28 crc kubenswrapper[4875]: I1007 09:05:28.015523 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-fmrrb_85fcbeae-845a-48a4-8aea-e21d5df0e1fd/nmstate-console-plugin/0.log" Oct 07 09:05:28 crc kubenswrapper[4875]: I1007 09:05:28.168456 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cjhsf_546057f3-6bcf-4fd9-9c8d-1d21f13a70f8/nmstate-handler/0.log" Oct 07 09:05:28 crc kubenswrapper[4875]: I1007 09:05:28.665070 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lrxvs_a15028de-2729-40bb-add3-8d042826a0e5/kube-rbac-proxy/0.log" Oct 07 09:05:28 crc kubenswrapper[4875]: I1007 09:05:28.839373 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lrxvs_a15028de-2729-40bb-add3-8d042826a0e5/nmstate-metrics/0.log" Oct 07 09:05:28 crc kubenswrapper[4875]: I1007 09:05:28.851677 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-t9gm5_53a570a0-5097-4c09-af4b-8bca758b17b6/nmstate-operator/0.log" Oct 07 09:05:29 crc kubenswrapper[4875]: I1007 09:05:29.017294 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sv4nj_9bb1aee8-fddf-47ed-8bf5-a4f7310c6c2f/nmstate-webhook/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.307031 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-jlztd_90069e5a-5263-4c20-9c21-caa665096b11/kube-rbac-proxy/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.455069 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-jlztd_90069e5a-5263-4c20-9c21-caa665096b11/controller/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.554264 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.730375 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.735109 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.767760 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.773448 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.947554 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.966794 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 09:05:41 crc kubenswrapper[4875]: I1007 09:05:41.977714 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.010040 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.154967 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-frr-files/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.157619 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-metrics/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.163756 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/cp-reloader/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.189480 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/controller/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.332168 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/frr-metrics/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.350592 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/kube-rbac-proxy/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.403379 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/kube-rbac-proxy-frr/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.598051 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/reloader/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.614534 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-hw72k_2570bc7b-9601-4fe3-8fb7-da9277a529c7/frr-k8s-webhook-server/0.log" Oct 07 09:05:42 crc kubenswrapper[4875]: I1007 09:05:42.821967 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79ccb7884d-7cp64_497229d2-1650-4976-88c5-24f97a0afdb8/manager/0.log" Oct 07 09:05:43 crc kubenswrapper[4875]: I1007 09:05:43.001239 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-769577ff68-db67z_f34be03b-6e0c-40e3-99b9-3b1dbe22e40a/webhook-server/0.log" Oct 07 09:05:43 crc kubenswrapper[4875]: I1007 09:05:43.029365 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-xf2p2_dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7/kube-rbac-proxy/0.log" Oct 07 09:05:43 crc kubenswrapper[4875]: I1007 09:05:43.635246 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xf2p2_dd7e9a4a-b7f9-4f1d-8268-5ec15e5152f7/speaker/0.log" Oct 07 09:05:43 crc kubenswrapper[4875]: I1007 09:05:43.987573 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-s9mlf_a5e85290-9c5d-43cf-b0a8-41c2399d7122/frr/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.266561 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.490465 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.496518 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.513067 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.711416 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/pull/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.730064 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/util/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.740639 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2kt7sn_d6b0d9bd-27b9-41ec-bc0e-87a065c184ce/extract/0.log" Oct 07 09:05:55 crc kubenswrapper[4875]: I1007 09:05:55.896247 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 09:05:56 crc kubenswrapper[4875]: I1007 09:05:56.388386 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 09:05:56 crc kubenswrapper[4875]: I1007 09:05:56.392059 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 09:05:56 crc kubenswrapper[4875]: I1007 09:05:56.401857 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 09:05:56 crc kubenswrapper[4875]: I1007 09:05:56.609453 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-utilities/0.log" Oct 07 09:05:56 
crc kubenswrapper[4875]: I1007 09:05:56.628983 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/extract-content/0.log" Oct 07 09:05:56 crc kubenswrapper[4875]: I1007 09:05:56.790942 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.059469 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.105220 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.110902 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.213346 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ndzbj_a0195b31-f7a7-4146-8e5c-a11661371eda/registry-server/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.337609 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-utilities/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.338586 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/extract-content/0.log" Oct 07 09:05:57 crc kubenswrapper[4875]: I1007 09:05:57.528205 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.034634 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9fmxk_a5460abe-f279-4a96-bb7b-6389750640bb/registry-server/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.279787 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.386967 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.418717 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.601195 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/util/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.620646 4875 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/pull/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.640647 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cg95v6_5e2cee0f-3bcf-478e-9c4c-650f621b6856/extract/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.781738 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kjd46_193bb790-d155-4aee-9f31-41b457c429ca/marketplace-operator/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.841486 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.979279 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 09:05:58 crc kubenswrapper[4875]: I1007 09:05:58.985693 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.013528 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.195648 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.233858 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/extract-utilities/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.259375 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.286579 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jbrrn_bcc94b2c-ef4b-4a9b-ba01-17f23df3b946/registry-server/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.445827 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.445997 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.470307 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.673472 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-content/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.701207 4875 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/extract-utilities/0.log" Oct 07 09:05:59 crc kubenswrapper[4875]: I1007 09:05:59.734173 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fpwf5_92781bfa-9f68-4be9-a393-9a40fa2b4e52/registry-server/0.log" Oct 07 09:06:31 crc kubenswrapper[4875]: I1007 09:06:31.221160 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:06:31 crc kubenswrapper[4875]: I1007 09:06:31.221699 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.273722 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:06:59 crc kubenswrapper[4875]: E1007 09:06:59.274795 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada48417-d933-4feb-a9e6-f4574843bf1f" containerName="container-00" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.274812 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada48417-d933-4feb-a9e6-f4574843bf1f" containerName="container-00" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.275130 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="ada48417-d933-4feb-a9e6-f4574843bf1f" containerName="container-00" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.279124 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.297471 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.367220 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.367297 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.367380 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9c6c\" (UniqueName: \"kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.469314 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9c6c\" (UniqueName: \"kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.469585 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.469706 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.470261 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.470267 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.496860 4875 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-b9c6c\" (UniqueName: \"kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c\") pod \"redhat-marketplace-rdjrl\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:06:59 crc kubenswrapper[4875]: I1007 09:06:59.631174 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:00 crc kubenswrapper[4875]: I1007 09:07:00.071949 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:07:00 crc kubenswrapper[4875]: I1007 09:07:00.382900 4875 generic.go:334] "Generic (PLEG): container finished" podID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerID="3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb" exitCode=0 Oct 07 09:07:00 crc kubenswrapper[4875]: I1007 09:07:00.382945 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerDied","Data":"3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb"} Oct 07 09:07:00 crc kubenswrapper[4875]: I1007 09:07:00.382971 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerStarted","Data":"b3b3341e3da5e912f3a86ed9bab25b69fbfdedf802e4e0171d34ea677c07f705"} Oct 07 09:07:00 crc kubenswrapper[4875]: I1007 09:07:00.384810 4875 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 09:07:01 crc kubenswrapper[4875]: I1007 09:07:01.221227 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:07:01 crc kubenswrapper[4875]: I1007 09:07:01.221567 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:07:01 crc kubenswrapper[4875]: I1007 09:07:01.394166 4875 generic.go:334] "Generic (PLEG): container finished" podID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerID="fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c" exitCode=0 Oct 07 09:07:01 crc kubenswrapper[4875]: I1007 09:07:01.394216 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerDied","Data":"fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c"} Oct 07 09:07:02 crc kubenswrapper[4875]: I1007 09:07:02.403793 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerStarted","Data":"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478"} Oct 07 09:07:02 crc kubenswrapper[4875]: I1007 09:07:02.425906 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rdjrl" 
podStartSLOduration=1.9515017970000001 podStartE2EDuration="3.42586723s" podCreationTimestamp="2025-10-07 09:06:59 +0000 UTC" firstStartedPulling="2025-10-07 09:07:00.384563739 +0000 UTC m=+4245.344334282" lastFinishedPulling="2025-10-07 09:07:01.858929172 +0000 UTC m=+4246.818699715" observedRunningTime="2025-10-07 09:07:02.421309894 +0000 UTC m=+4247.381080447" watchObservedRunningTime="2025-10-07 09:07:02.42586723 +0000 UTC m=+4247.385637773" Oct 07 09:07:09 crc kubenswrapper[4875]: I1007 09:07:09.632108 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:09 crc kubenswrapper[4875]: I1007 09:07:09.634508 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:09 crc kubenswrapper[4875]: I1007 09:07:09.684081 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:10 crc kubenswrapper[4875]: I1007 09:07:10.535762 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:10 crc kubenswrapper[4875]: I1007 09:07:10.576256 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:07:12 crc kubenswrapper[4875]: I1007 09:07:12.508425 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rdjrl" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="registry-server" containerID="cri-o://68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478" gracePeriod=2 Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.488271 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.524957 4875 generic.go:334] "Generic (PLEG): container finished" podID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerID="68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478" exitCode=0 Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.525000 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerDied","Data":"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478"} Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.525030 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdjrl" event={"ID":"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d","Type":"ContainerDied","Data":"b3b3341e3da5e912f3a86ed9bab25b69fbfdedf802e4e0171d34ea677c07f705"} Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.525050 4875 scope.go:117] "RemoveContainer" containerID="68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.525064 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdjrl" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.553759 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content\") pod \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.553989 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities\") pod \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.554038 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9c6c\" (UniqueName: \"kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c\") pod \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\" (UID: \"9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d\") " Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.554908 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities" (OuterVolumeSpecName: "utilities") pod "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" (UID: "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.555000 4875 scope.go:117] "RemoveContainer" containerID="fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.560511 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c" (OuterVolumeSpecName: "kube-api-access-b9c6c") pod "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" (UID: "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d"). InnerVolumeSpecName "kube-api-access-b9c6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.567617 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" (UID: "9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.631453 4875 scope.go:117] "RemoveContainer" containerID="3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.667137 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.667170 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.667184 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9c6c\" (UniqueName: \"kubernetes.io/projected/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d-kube-api-access-b9c6c\") on node \"crc\" DevicePath \"\"" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.688672 4875 scope.go:117] "RemoveContainer" containerID="68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478" Oct 07 09:07:13 crc kubenswrapper[4875]: E1007 09:07:13.689241 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478\": container with ID starting with 68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478 not found: ID does not exist" containerID="68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.689303 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478"} err="failed to get container status \"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478\": rpc error: code = NotFound desc = could not find container \"68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478\": container with ID starting with 68f739d610461e2fa8d92a6a517d18e1e5e05844473b849e0ed079c7b1899478 not found: ID does not exist" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.689331 4875 scope.go:117] "RemoveContainer" containerID="fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c" Oct 07 09:07:13 crc kubenswrapper[4875]: E1007 09:07:13.689728 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c\": container with ID starting with fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c not found: ID does not exist" containerID="fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.689766 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c"} err="failed to get container status \"fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c\": rpc error: code = NotFound desc = could not find container \"fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c\": container with ID starting with fdf7ac45cb56c64cf2a6c414a027bc5b48517a13afcd71212706e4ee946caf4c not found: ID does not exist" Oct 07 09:07:13 crc 
kubenswrapper[4875]: I1007 09:07:13.689793 4875 scope.go:117] "RemoveContainer" containerID="3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb" Oct 07 09:07:13 crc kubenswrapper[4875]: E1007 09:07:13.691255 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb\": container with ID starting with 3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb not found: ID does not exist" containerID="3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.691304 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb"} err="failed to get container status \"3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb\": rpc error: code = NotFound desc = could not find container \"3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb\": container with ID starting with 3c7b3c20497cc3b0d656baa00f18ed1e03a466e185f4333e50ac3c9966a5b0cb not found: ID does not exist" Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.844474 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:07:13 crc kubenswrapper[4875]: I1007 09:07:13.854264 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdjrl"] Oct 07 09:07:15 crc kubenswrapper[4875]: I1007 09:07:15.726914 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" path="/var/lib/kubelet/pods/9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d/volumes" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.221352 4875 patch_prober.go:28] interesting pod/machine-config-daemon-hx68m container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.223034 4875 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.223428 4875 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.224162 4875 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9"} pod="openshift-machine-config-operator/machine-config-daemon-hx68m" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.224296 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerName="machine-config-daemon" 
containerID="cri-o://21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" gracePeriod=600 Oct 07 09:07:31 crc kubenswrapper[4875]: E1007 09:07:31.346868 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.679805 4875 generic.go:334] "Generic (PLEG): container finished" podID="3928c10c-c3da-41eb-96b2-629d67cfb31f" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" exitCode=0 Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.679861 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" event={"ID":"3928c10c-c3da-41eb-96b2-629d67cfb31f","Type":"ContainerDied","Data":"21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9"} Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.679916 4875 scope.go:117] "RemoveContainer" containerID="88afb29220bf87355f3a81b8423e1e6c82592115d5539c502be5868130a1976f" Oct 07 09:07:31 crc kubenswrapper[4875]: I1007 09:07:31.680873 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:07:31 crc kubenswrapper[4875]: E1007 09:07:31.683724 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:07:43 crc kubenswrapper[4875]: I1007 09:07:43.706998 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:07:43 crc kubenswrapper[4875]: E1007 09:07:43.707991 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:07:56 crc kubenswrapper[4875]: I1007 09:07:56.697072 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:07:56 crc kubenswrapper[4875]: E1007 09:07:56.698014 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:07:58 crc kubenswrapper[4875]: I1007 09:07:58.971831 4875 generic.go:334] "Generic (PLEG): container finished" podID="5ba0d4cd-d615-4db2-86d6-849496631712" 
containerID="a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364" exitCode=0 Oct 07 09:07:58 crc kubenswrapper[4875]: I1007 09:07:58.971927 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" event={"ID":"5ba0d4cd-d615-4db2-86d6-849496631712","Type":"ContainerDied","Data":"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364"} Oct 07 09:07:58 crc kubenswrapper[4875]: I1007 09:07:58.972861 4875 scope.go:117] "RemoveContainer" containerID="a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364" Oct 07 09:07:59 crc kubenswrapper[4875]: I1007 09:07:59.129228 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xfnzz_must-gather-ff6lw_5ba0d4cd-d615-4db2-86d6-849496631712/gather/0.log" Oct 07 09:08:07 crc kubenswrapper[4875]: I1007 09:08:07.697332 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:08:07 crc kubenswrapper[4875]: E1007 09:08:07.698286 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:08:10 crc kubenswrapper[4875]: I1007 09:08:10.536410 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xfnzz/must-gather-ff6lw"] Oct 07 09:08:10 crc kubenswrapper[4875]: I1007 09:08:10.536668 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="copy" containerID="cri-o://9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a" gracePeriod=2 Oct 07 09:08:10 crc kubenswrapper[4875]: I1007 09:08:10.545532 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xfnzz/must-gather-ff6lw"] Oct 07 09:08:10 crc kubenswrapper[4875]: I1007 09:08:10.969575 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xfnzz_must-gather-ff6lw_5ba0d4cd-d615-4db2-86d6-849496631712/copy/0.log" Oct 07 09:08:10 crc kubenswrapper[4875]: I1007 09:08:10.970257 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.080411 4875 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xfnzz_must-gather-ff6lw_5ba0d4cd-d615-4db2-86d6-849496631712/copy/0.log" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.080802 4875 generic.go:334] "Generic (PLEG): container finished" podID="5ba0d4cd-d615-4db2-86d6-849496631712" containerID="9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a" exitCode=143 Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.080898 4875 scope.go:117] "RemoveContainer" containerID="9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.080907 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xfnzz/must-gather-ff6lw" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.097520 4875 scope.go:117] "RemoveContainer" containerID="a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.098254 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output\") pod \"5ba0d4cd-d615-4db2-86d6-849496631712\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.098301 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzczh\" (UniqueName: \"kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh\") pod \"5ba0d4cd-d615-4db2-86d6-849496631712\" (UID: \"5ba0d4cd-d615-4db2-86d6-849496631712\") " Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.109936 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh" (OuterVolumeSpecName: "kube-api-access-lzczh") pod "5ba0d4cd-d615-4db2-86d6-849496631712" (UID: "5ba0d4cd-d615-4db2-86d6-849496631712"). InnerVolumeSpecName "kube-api-access-lzczh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.184694 4875 scope.go:117] "RemoveContainer" containerID="9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a" Oct 07 09:08:11 crc kubenswrapper[4875]: E1007 09:08:11.185383 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a\": container with ID starting with 9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a not found: ID does not exist" containerID="9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.185426 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a"} err="failed to get container status \"9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a\": rpc error: code = NotFound desc = could not find container \"9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a\": container with ID starting with 9f63fe6805dd9c76a351630968a495393aa47c7ad00afe73f27d084a1de2422a not found: ID does not exist" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.185453 4875 scope.go:117] "RemoveContainer" containerID="a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364" Oct 07 09:08:11 crc kubenswrapper[4875]: E1007 09:08:11.186548 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364\": container with ID starting with a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364 not found: ID does not exist" containerID="a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.186610 4875 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364"} err="failed to get container status \"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364\": rpc error: code = NotFound desc = could not find container \"a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364\": container with ID starting with a4a8aba01167c52407bcf04112de43d6ad4946ab2f2a5efb6e2836f84365a364 not found: ID does not exist" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.201251 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzczh\" (UniqueName: \"kubernetes.io/projected/5ba0d4cd-d615-4db2-86d6-849496631712-kube-api-access-lzczh\") on node \"crc\" DevicePath \"\"" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.279800 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5ba0d4cd-d615-4db2-86d6-849496631712" (UID: "5ba0d4cd-d615-4db2-86d6-849496631712"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.303376 4875 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5ba0d4cd-d615-4db2-86d6-849496631712-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 07 09:08:11 crc kubenswrapper[4875]: I1007 09:08:11.707896 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" path="/var/lib/kubelet/pods/5ba0d4cd-d615-4db2-86d6-849496631712/volumes" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.454903 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.456052 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="copy" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456073 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="copy" Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.456092 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="gather" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456103 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="gather" Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.456116 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="extract-utilities" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456126 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="extract-utilities" Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.456165 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="extract-content" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456179 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="extract-content" Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.456222 4875 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="registry-server" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456233 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="registry-server" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456519 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="9518cf99-8e53-4b9b-bb4b-0dccb35b4a0d" containerName="registry-server" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456571 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="copy" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.456628 4875 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba0d4cd-d615-4db2-86d6-849496631712" containerName="gather" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.464534 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.476800 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.601667 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqf67\" (UniqueName: \"kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.601768 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.601790 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.697565 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:08:21 crc kubenswrapper[4875]: E1007 09:08:21.697889 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.703312 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc 
kubenswrapper[4875]: I1007 09:08:21.703365 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.703499 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqf67\" (UniqueName: \"kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.703936 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.703978 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.723225 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqf67\" (UniqueName: \"kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67\") pod \"redhat-operators-69fcn\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:21 crc kubenswrapper[4875]: I1007 09:08:21.799862 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:22 crc kubenswrapper[4875]: I1007 09:08:22.292678 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:23 crc kubenswrapper[4875]: I1007 09:08:23.183557 4875 generic.go:334] "Generic (PLEG): container finished" podID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerID="ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9" exitCode=0 Oct 07 09:08:23 crc kubenswrapper[4875]: I1007 09:08:23.183630 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerDied","Data":"ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9"} Oct 07 09:08:23 crc kubenswrapper[4875]: I1007 09:08:23.183995 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerStarted","Data":"238c6e5fac43a921aa9fc44417b909099f8023e68441b4f4b3665e159319936e"} Oct 07 09:08:25 crc kubenswrapper[4875]: I1007 09:08:25.206585 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerStarted","Data":"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7"} Oct 07 09:08:27 crc kubenswrapper[4875]: I1007 09:08:27.238147 4875 generic.go:334] "Generic (PLEG): container finished" podID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerID="d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7" exitCode=0 Oct 07 09:08:27 crc kubenswrapper[4875]: I1007 09:08:27.238391 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerDied","Data":"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7"} Oct 07 09:08:28 crc kubenswrapper[4875]: I1007 09:08:28.265217 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerStarted","Data":"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0"} Oct 07 09:08:28 crc kubenswrapper[4875]: I1007 09:08:28.297783 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-69fcn" podStartSLOduration=2.812780735 podStartE2EDuration="7.297740767s" podCreationTimestamp="2025-10-07 09:08:21 +0000 UTC" firstStartedPulling="2025-10-07 09:08:23.185817163 +0000 UTC m=+4328.145587706" lastFinishedPulling="2025-10-07 09:08:27.670777195 +0000 UTC m=+4332.630547738" observedRunningTime="2025-10-07 09:08:28.296453156 +0000 UTC m=+4333.256223739" watchObservedRunningTime="2025-10-07 09:08:28.297740767 +0000 UTC m=+4333.257511320" Oct 07 09:08:31 crc kubenswrapper[4875]: I1007 09:08:31.800436 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:31 crc kubenswrapper[4875]: I1007 09:08:31.801078 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:32 crc kubenswrapper[4875]: I1007 09:08:32.855959 4875 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-69fcn" 
podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="registry-server" probeResult="failure" output=< Oct 07 09:08:32 crc kubenswrapper[4875]: timeout: failed to connect service ":50051" within 1s Oct 07 09:08:32 crc kubenswrapper[4875]: > Oct 07 09:08:34 crc kubenswrapper[4875]: I1007 09:08:34.699370 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:08:34 crc kubenswrapper[4875]: E1007 09:08:34.699958 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:08:41 crc kubenswrapper[4875]: I1007 09:08:41.848992 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:41 crc kubenswrapper[4875]: I1007 09:08:41.898300 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:42 crc kubenswrapper[4875]: I1007 09:08:42.088328 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.398802 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-69fcn" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="registry-server" containerID="cri-o://a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0" gracePeriod=2 Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.900169 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.979683 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqf67\" (UniqueName: \"kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67\") pod \"ac639231-8cc2-4efd-90cc-8d27167e877c\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.979897 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities\") pod \"ac639231-8cc2-4efd-90cc-8d27167e877c\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.979971 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content\") pod \"ac639231-8cc2-4efd-90cc-8d27167e877c\" (UID: \"ac639231-8cc2-4efd-90cc-8d27167e877c\") " Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.980605 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities" (OuterVolumeSpecName: "utilities") pod "ac639231-8cc2-4efd-90cc-8d27167e877c" (UID: "ac639231-8cc2-4efd-90cc-8d27167e877c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:08:43 crc kubenswrapper[4875]: I1007 09:08:43.985786 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67" (OuterVolumeSpecName: "kube-api-access-hqf67") pod "ac639231-8cc2-4efd-90cc-8d27167e877c" (UID: "ac639231-8cc2-4efd-90cc-8d27167e877c"). InnerVolumeSpecName "kube-api-access-hqf67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.072936 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac639231-8cc2-4efd-90cc-8d27167e877c" (UID: "ac639231-8cc2-4efd-90cc-8d27167e877c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.082409 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqf67\" (UniqueName: \"kubernetes.io/projected/ac639231-8cc2-4efd-90cc-8d27167e877c-kube-api-access-hqf67\") on node \"crc\" DevicePath \"\"" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.082459 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.082472 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac639231-8cc2-4efd-90cc-8d27167e877c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.411335 4875 generic.go:334] "Generic (PLEG): container finished" podID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerID="a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0" exitCode=0 Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.411386 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerDied","Data":"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0"} Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.411418 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-69fcn" event={"ID":"ac639231-8cc2-4efd-90cc-8d27167e877c","Type":"ContainerDied","Data":"238c6e5fac43a921aa9fc44417b909099f8023e68441b4f4b3665e159319936e"} Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.411420 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-69fcn" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.411483 4875 scope.go:117] "RemoveContainer" containerID="a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.431651 4875 scope.go:117] "RemoveContainer" containerID="d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.446445 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.459838 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-69fcn"] Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.471926 4875 scope.go:117] "RemoveContainer" containerID="ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.498433 4875 scope.go:117] "RemoveContainer" containerID="a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0" Oct 07 09:08:44 crc kubenswrapper[4875]: E1007 09:08:44.498893 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0\": container with ID starting with a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0 not found: ID does not exist" containerID="a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.499033 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0"} err="failed to get container status \"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0\": rpc error: code = NotFound desc = could not find container \"a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0\": container with ID starting with a1515b3bb0b1a1fa9142753964f80db9d01130c01780d1ac43746467c7df98e0 not found: ID does not exist" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.499129 4875 scope.go:117] "RemoveContainer" containerID="d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7" Oct 07 09:08:44 crc kubenswrapper[4875]: E1007 09:08:44.499945 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7\": container with ID starting with d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7 not found: ID does not exist" containerID="d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.500005 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7"} err="failed to get container status \"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7\": rpc error: code = NotFound desc = could not find container \"d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7\": container with ID starting with d85bac7343ec5aaec40335ed5220b0e8044313bb242a7d505efc82e70afc79c7 not found: ID does not exist" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.500041 4875 scope.go:117] "RemoveContainer" 
containerID="ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9" Oct 07 09:08:44 crc kubenswrapper[4875]: E1007 09:08:44.500458 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9\": container with ID starting with ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9 not found: ID does not exist" containerID="ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9" Oct 07 09:08:44 crc kubenswrapper[4875]: I1007 09:08:44.500511 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9"} err="failed to get container status \"ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9\": rpc error: code = NotFound desc = could not find container \"ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9\": container with ID starting with ecfcf0b68a25f1947254e4d9680e17b73f1b8b372580c8fc26b9d6f627fa48b9 not found: ID does not exist" Oct 07 09:08:45 crc kubenswrapper[4875]: I1007 09:08:45.713911 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" path="/var/lib/kubelet/pods/ac639231-8cc2-4efd-90cc-8d27167e877c/volumes" Oct 07 09:08:48 crc kubenswrapper[4875]: I1007 09:08:48.698900 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:08:48 crc kubenswrapper[4875]: E1007 09:08:48.699434 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.856018 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:08:54 crc kubenswrapper[4875]: E1007 09:08:54.857099 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="registry-server" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.857117 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="registry-server" Oct 07 09:08:54 crc kubenswrapper[4875]: E1007 09:08:54.857145 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="extract-utilities" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.857154 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="extract-utilities" Oct 07 09:08:54 crc kubenswrapper[4875]: E1007 09:08:54.857179 4875 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="extract-content" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.857188 4875 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="extract-content" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.857423 4875 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ac639231-8cc2-4efd-90cc-8d27167e877c" containerName="registry-server" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.858972 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.866783 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.975059 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.975102 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsgk4\" (UniqueName: \"kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:54 crc kubenswrapper[4875]: I1007 09:08:54.975152 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.077230 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.077393 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.077415 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsgk4\" (UniqueName: \"kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.077806 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.077846 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content\") pod \"certified-operators-56xrp\" (UID: 
\"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.098646 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsgk4\" (UniqueName: \"kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4\") pod \"certified-operators-56xrp\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.200078 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:08:55 crc kubenswrapper[4875]: I1007 09:08:55.721080 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:08:56 crc kubenswrapper[4875]: I1007 09:08:56.515021 4875 generic.go:334] "Generic (PLEG): container finished" podID="5595ca31-670e-4b1c-bd83-4c1e15d89ed3" containerID="15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2" exitCode=0 Oct 07 09:08:56 crc kubenswrapper[4875]: I1007 09:08:56.515262 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerDied","Data":"15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2"} Oct 07 09:08:56 crc kubenswrapper[4875]: I1007 09:08:56.515930 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerStarted","Data":"e8753ac81f51c78890b7494d77d41ccf655e60b4871092df64dd9a2a70b98bae"} Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.456330 4875 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.459086 4875 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.468944 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.529661 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerStarted","Data":"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5"} Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.622591 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.622669 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42sts\" (UniqueName: \"kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.622720 4875 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.725348 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42sts\" (UniqueName: \"kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.725682 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.725884 4875 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.726313 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.727498 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.756854 4875 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42sts\" (UniqueName: \"kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts\") pod \"community-operators-4bf8n\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:57 crc kubenswrapper[4875]: I1007 09:08:57.817505 4875 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:08:58 crc kubenswrapper[4875]: I1007 09:08:58.386811 4875 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:08:58 crc kubenswrapper[4875]: I1007 09:08:58.543812 4875 generic.go:334] "Generic (PLEG): container finished" podID="5595ca31-670e-4b1c-bd83-4c1e15d89ed3" containerID="5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5" exitCode=0 Oct 07 09:08:58 crc kubenswrapper[4875]: I1007 09:08:58.544751 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerDied","Data":"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5"} Oct 07 09:08:58 crc kubenswrapper[4875]: I1007 09:08:58.550092 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerStarted","Data":"39cf77862e9379e393658d6018c31e2dd33083c96dfe1749092da5344bcf3f7d"} Oct 07 09:08:59 crc kubenswrapper[4875]: I1007 09:08:59.558779 4875 generic.go:334] "Generic (PLEG): container finished" podID="e8a3118f-3456-4878-88d3-69fb07e4d6db" containerID="38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7" exitCode=0 Oct 07 09:08:59 crc kubenswrapper[4875]: I1007 09:08:59.558829 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerDied","Data":"38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7"} Oct 07 09:08:59 crc kubenswrapper[4875]: I1007 09:08:59.561813 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerStarted","Data":"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba"} Oct 07 09:08:59 crc kubenswrapper[4875]: I1007 09:08:59.598091 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-56xrp" podStartSLOduration=3.096532055 podStartE2EDuration="5.598069713s" podCreationTimestamp="2025-10-07 09:08:54 +0000 UTC" firstStartedPulling="2025-10-07 09:08:56.516706854 +0000 UTC m=+4361.476477397" lastFinishedPulling="2025-10-07 09:08:59.018244512 +0000 UTC m=+4363.978015055" observedRunningTime="2025-10-07 09:08:59.59237246 +0000 UTC m=+4364.552143013" watchObservedRunningTime="2025-10-07 09:08:59.598069713 +0000 UTC m=+4364.557840266" Oct 07 09:09:01 crc kubenswrapper[4875]: I1007 09:09:01.579769 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerStarted","Data":"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d"} Oct 07 09:09:02 crc kubenswrapper[4875]: I1007 09:09:02.590669 4875 generic.go:334] "Generic (PLEG): container finished" podID="e8a3118f-3456-4878-88d3-69fb07e4d6db" containerID="3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d" exitCode=0 Oct 07 09:09:02 crc kubenswrapper[4875]: I1007 09:09:02.590734 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerDied","Data":"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d"} Oct 07 09:09:03 crc kubenswrapper[4875]: I1007 09:09:03.602530 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerStarted","Data":"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c"} Oct 07 09:09:03 crc kubenswrapper[4875]: I1007 09:09:03.639984 4875 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4bf8n" podStartSLOduration=3.143291325 podStartE2EDuration="6.639967749s" podCreationTimestamp="2025-10-07 09:08:57 +0000 UTC" firstStartedPulling="2025-10-07 09:08:59.560397665 +0000 UTC m=+4364.520168208" lastFinishedPulling="2025-10-07 09:09:03.057074089 +0000 UTC m=+4368.016844632" observedRunningTime="2025-10-07 09:09:03.617704185 +0000 UTC m=+4368.577474738" watchObservedRunningTime="2025-10-07 09:09:03.639967749 +0000 UTC m=+4368.599738292" Oct 07 09:09:03 crc kubenswrapper[4875]: I1007 09:09:03.699154 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:09:03 crc kubenswrapper[4875]: E1007 09:09:03.699409 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" Oct 07 09:09:05 crc kubenswrapper[4875]: I1007 09:09:05.201225 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:05 crc kubenswrapper[4875]: I1007 09:09:05.201575 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:05 crc kubenswrapper[4875]: I1007 09:09:05.246508 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:05 crc kubenswrapper[4875]: I1007 09:09:05.669483 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:06 crc kubenswrapper[4875]: I1007 09:09:06.850918 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:09:07 crc kubenswrapper[4875]: I1007 09:09:07.643467 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-56xrp" 
podUID="5595ca31-670e-4b1c-bd83-4c1e15d89ed3" containerName="registry-server" containerID="cri-o://ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba" gracePeriod=2 Oct 07 09:09:07 crc kubenswrapper[4875]: I1007 09:09:07.817732 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:07 crc kubenswrapper[4875]: I1007 09:09:07.817849 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:07 crc kubenswrapper[4875]: I1007 09:09:07.864120 4875 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.313361 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.452209 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content\") pod \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.452549 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities\") pod \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.452643 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsgk4\" (UniqueName: \"kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4\") pod \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\" (UID: \"5595ca31-670e-4b1c-bd83-4c1e15d89ed3\") " Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.453471 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities" (OuterVolumeSpecName: "utilities") pod "5595ca31-670e-4b1c-bd83-4c1e15d89ed3" (UID: "5595ca31-670e-4b1c-bd83-4c1e15d89ed3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.473647 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4" (OuterVolumeSpecName: "kube-api-access-wsgk4") pod "5595ca31-670e-4b1c-bd83-4c1e15d89ed3" (UID: "5595ca31-670e-4b1c-bd83-4c1e15d89ed3"). InnerVolumeSpecName "kube-api-access-wsgk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.516723 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5595ca31-670e-4b1c-bd83-4c1e15d89ed3" (UID: "5595ca31-670e-4b1c-bd83-4c1e15d89ed3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.556421 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsgk4\" (UniqueName: \"kubernetes.io/projected/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-kube-api-access-wsgk4\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.556486 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.556502 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5595ca31-670e-4b1c-bd83-4c1e15d89ed3-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.656439 4875 generic.go:334] "Generic (PLEG): container finished" podID="5595ca31-670e-4b1c-bd83-4c1e15d89ed3" containerID="ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba" exitCode=0 Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.657017 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerDied","Data":"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba"} Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.657134 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56xrp" event={"ID":"5595ca31-670e-4b1c-bd83-4c1e15d89ed3","Type":"ContainerDied","Data":"e8753ac81f51c78890b7494d77d41ccf655e60b4871092df64dd9a2a70b98bae"} Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.657159 4875 scope.go:117] "RemoveContainer" containerID="ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.657051 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-56xrp" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.688846 4875 scope.go:117] "RemoveContainer" containerID="5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.711925 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.714671 4875 scope.go:117] "RemoveContainer" containerID="15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.724782 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-56xrp"] Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.736837 4875 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.772335 4875 scope.go:117] "RemoveContainer" containerID="ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba" Oct 07 09:09:08 crc kubenswrapper[4875]: E1007 09:09:08.773012 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba\": container with ID starting with ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba not found: ID does not exist" containerID="ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.773102 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba"} err="failed to get container status \"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba\": rpc error: code = NotFound desc = could not find container \"ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba\": container with ID starting with ecd4d16a7b3bbe40753fbf72646549316e47c9858cc7f1fbd5d2bd756af839ba not found: ID does not exist" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.773140 4875 scope.go:117] "RemoveContainer" containerID="5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5" Oct 07 09:09:08 crc kubenswrapper[4875]: E1007 09:09:08.773489 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5\": container with ID starting with 5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5 not found: ID does not exist" containerID="5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.773520 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5"} err="failed to get container status \"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5\": rpc error: code = NotFound desc = could not find container \"5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5\": container with ID starting with 5e7c69300f98e56de9e121f59c9fe9a00a5517f731bfbdffad7f30bdfbfddec5 not found: ID does not exist" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.773536 4875 scope.go:117] "RemoveContainer" 
containerID="15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2" Oct 07 09:09:08 crc kubenswrapper[4875]: E1007 09:09:08.773743 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2\": container with ID starting with 15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2 not found: ID does not exist" containerID="15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2" Oct 07 09:09:08 crc kubenswrapper[4875]: I1007 09:09:08.773775 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2"} err="failed to get container status \"15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2\": rpc error: code = NotFound desc = could not find container \"15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2\": container with ID starting with 15d177f4c66c6c2f84719c9e3237b6887004cb8421df266219ef3b95001431f2 not found: ID does not exist" Oct 07 09:09:09 crc kubenswrapper[4875]: I1007 09:09:09.707585 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5595ca31-670e-4b1c-bd83-4c1e15d89ed3" path="/var/lib/kubelet/pods/5595ca31-670e-4b1c-bd83-4c1e15d89ed3/volumes" Oct 07 09:09:10 crc kubenswrapper[4875]: I1007 09:09:10.049432 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:09:10 crc kubenswrapper[4875]: I1007 09:09:10.675516 4875 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4bf8n" podUID="e8a3118f-3456-4878-88d3-69fb07e4d6db" containerName="registry-server" containerID="cri-o://377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c" gracePeriod=2 Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.271605 4875 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.415212 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42sts\" (UniqueName: \"kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts\") pod \"e8a3118f-3456-4878-88d3-69fb07e4d6db\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.415329 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities\") pod \"e8a3118f-3456-4878-88d3-69fb07e4d6db\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.415424 4875 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content\") pod \"e8a3118f-3456-4878-88d3-69fb07e4d6db\" (UID: \"e8a3118f-3456-4878-88d3-69fb07e4d6db\") " Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.416598 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities" (OuterVolumeSpecName: "utilities") pod "e8a3118f-3456-4878-88d3-69fb07e4d6db" (UID: "e8a3118f-3456-4878-88d3-69fb07e4d6db"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.424018 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts" (OuterVolumeSpecName: "kube-api-access-42sts") pod "e8a3118f-3456-4878-88d3-69fb07e4d6db" (UID: "e8a3118f-3456-4878-88d3-69fb07e4d6db"). InnerVolumeSpecName "kube-api-access-42sts". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.474403 4875 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8a3118f-3456-4878-88d3-69fb07e4d6db" (UID: "e8a3118f-3456-4878-88d3-69fb07e4d6db"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.518199 4875 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42sts\" (UniqueName: \"kubernetes.io/projected/e8a3118f-3456-4878-88d3-69fb07e4d6db-kube-api-access-42sts\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.518393 4875 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.518409 4875 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8a3118f-3456-4878-88d3-69fb07e4d6db-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.693685 4875 generic.go:334] "Generic (PLEG): container finished" podID="e8a3118f-3456-4878-88d3-69fb07e4d6db" containerID="377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c" exitCode=0 Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.693965 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerDied","Data":"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c"} Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.694131 4875 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4bf8n" event={"ID":"e8a3118f-3456-4878-88d3-69fb07e4d6db","Type":"ContainerDied","Data":"39cf77862e9379e393658d6018c31e2dd33083c96dfe1749092da5344bcf3f7d"} Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.694171 4875 scope.go:117] "RemoveContainer" containerID="377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.693981 4875 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4bf8n" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.730093 4875 scope.go:117] "RemoveContainer" containerID="3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d" Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.734943 4875 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:09:11 crc kubenswrapper[4875]: I1007 09:09:11.741324 4875 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4bf8n"] Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.041521 4875 scope.go:117] "RemoveContainer" containerID="38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.088212 4875 scope.go:117] "RemoveContainer" containerID="377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c" Oct 07 09:09:12 crc kubenswrapper[4875]: E1007 09:09:12.088894 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c\": container with ID starting with 377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c not found: ID does not exist" containerID="377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.088964 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c"} err="failed to get container status \"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c\": rpc error: code = NotFound desc = could not find container \"377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c\": container with ID starting with 377c39cf217dee84eeac90d9d1e0638c0c37cc674d4585ce586e6594ef6db30c not found: ID does not exist" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.089000 4875 scope.go:117] "RemoveContainer" containerID="3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d" Oct 07 09:09:12 crc kubenswrapper[4875]: E1007 09:09:12.089515 4875 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d\": container with ID starting with 3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d not found: ID does not exist" containerID="3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.089549 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d"} err="failed to get container status \"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d\": rpc error: code = NotFound desc = could not find container \"3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d\": container with ID starting with 3be06e80cb7dcf62fc13dcf65152186a7ce9d103924b24d73a9005422effb56d not found: ID does not exist" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.089572 4875 scope.go:117] "RemoveContainer" containerID="38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7" Oct 07 09:09:12 crc kubenswrapper[4875]: E1007 09:09:12.089851 4875 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7\": container with ID starting with 38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7 not found: ID does not exist" containerID="38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7" Oct 07 09:09:12 crc kubenswrapper[4875]: I1007 09:09:12.089922 4875 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7"} err="failed to get container status \"38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7\": rpc error: code = NotFound desc = could not find container \"38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7\": container with ID starting with 38715fc4606f4949d2435f3073f13f96490b4edc311dc7a8eaeea66f86a075f7 not found: ID does not exist" Oct 07 09:09:13 crc kubenswrapper[4875]: I1007 09:09:13.706645 4875 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8a3118f-3456-4878-88d3-69fb07e4d6db" path="/var/lib/kubelet/pods/e8a3118f-3456-4878-88d3-69fb07e4d6db/volumes" Oct 07 09:09:18 crc kubenswrapper[4875]: I1007 09:09:18.698031 4875 scope.go:117] "RemoveContainer" containerID="21bf6bf5228a798ba24e02287093bd82c692d5a50ac902514b7d5b0597423fe9" Oct 07 09:09:18 crc kubenswrapper[4875]: E1007 09:09:18.698592 4875 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hx68m_openshift-machine-config-operator(3928c10c-c3da-41eb-96b2-629d67cfb31f)\"" pod="openshift-machine-config-operator/machine-config-daemon-hx68m" podUID="3928c10c-c3da-41eb-96b2-629d67cfb31f" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515071154314024446 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015071154315017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015071143311016501 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015071143311015451 5ustar corecore